Example usage for org.opencv.core Mat empty

List of usage examples for org.opencv.core Mat empty

Introduction

On this page you can find example usage for org.opencv.core.Mat.empty().

Prototype

public boolean empty() 
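
Mat.empty() returns true when the matrix contains no data, which is the usual way to check whether an image was actually loaded or a frame was actually captured. Below is a minimal, self-contained sketch; the class name and the file path "input.png" are illustrative placeholders rather than part of the examples that follow.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatEmptyExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before creating any Mat objects.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // imread() returns an empty Mat (no data) if the file cannot be read.
        Mat image = Imgcodecs.imread("input.png");

        if (image.empty()) {
            System.err.println("Could not load input.png");
        } else {
            System.out.println("Loaded image: " + image.width() + "x" + image.height());
        }
    }
}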


Usage

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static ArrayList<double[][][]> getOpticalHistograms(Path filename, int w_d, int h_d, int o_d)
        throws PoTException {
    ArrayList<double[][][]> histograms = new ArrayList<double[][][]>();

    VideoCapture capture = new VideoCapture(filename.toString());

    if (!capture.isOpened()) {
        LOG.warning("video file " + filename.getFileName() + " could not be opened.");

        double[][][] hist = new double[w_d][h_d][o_d];
        histograms.add(hist);
    } else {
        // variables for processing images
        Mat original_frame = new Mat();

        Mat frame = new Mat();
        Mat frame_gray = new Mat();
        Mat prev_frame_gray = new Mat();
        MatOfPoint2f flow = new MatOfPoint2f();

        // computing a list of histogram of optical flows (i.e. a list of 5*5*8
        // arrays)
        for (int frame_index = 0;; frame_index++) {
            // capturing the video images
            capture.read(original_frame);

            if (original_frame.empty()) {
                if (frame_index == 0) {
                    throw new PoTException("Could not read the video file");
                } else {
                    break;
                }
            } else {
                // resizing the captured frame and converting it to the gray scale
                // image.
                Imgproc.resize(original_frame, frame, new Size(frame_width, frame_height));
                Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);

                double[][][] hist = new double[w_d][h_d][o_d];
                histograms.add(hist);

                // from frame #2
                if (frame_index > 0) {
                    // calculate optical flows
                    Video.calcOpticalFlowFarneback(prev_frame_gray, frame_gray, flow, 0.5, 1, 10, 2, 7, 1.5, 0); // 0.5, 1, 15, 2, 7, 1.5, 0

                    // update histogram of optical flows
                    updateOpticalHistogram(histograms.get(frame_index), flow);
                }

                Mat temp_frame = prev_frame_gray;
                prev_frame_gray = frame_gray;
                frame_gray = temp_frame;
            }
        }

        capture.release();
    }

    return histograms;
}

From source file:gov.nasa.jpl.memex.pooledtimeseries.PoT.java

License:Apache License

static ArrayList<double[][][]> getGradientHistograms(Path filename, int w_d, int h_d, int o_d)
        throws PoTException {
    ArrayList<double[][][]> histograms = new ArrayList<double[][][]>();

    VideoCapture capture = new VideoCapture(filename.toString());

    if (!capture.isOpened()) {
        LOG.warning("video file not opened.");

        double[][][] hist = new double[w_d][h_d][o_d];
        histograms.add(hist);
    } else {
        // variables for processing images
        Mat original_frame = new Mat();
        Mat resized = new Mat();
        Mat resized_gray = new Mat();

        // initializing a list of histogram of gradients (i.e. a list of s*s*9
        // arrays)
        for (int i = 0;; i++) {
            // capturing the video images
            capture.read(original_frame);
            if (original_frame.empty()) {
                if (i == 0) {
                    throw new PoTException("Could not read the video file");
                } else {
                    break;
                }
            }

            double[][][] hist = new double[w_d][h_d][o_d];

            Imgproc.resize(original_frame, resized, new Size(frame_width, frame_height));
            Imgproc.cvtColor(resized, resized_gray, Imgproc.COLOR_BGR2GRAY);

            ArrayList<double[][]> gradients = computeGradients(resized_gray, o_d);
            updateGradientHistogram(hist, gradients);

            histograms.add(hist);
        }

        capture.release();
    }

    return histograms;
}

From source file:houghtransform.Picture.java

public Picture(String fileName, int choiceT) throws IOException {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat scr = Imgcodecs.imread(fileName);
    if (scr.empty())
        throw new IOException("File not exist!!!");

    Mat edges = new Mat();
    // Canny Edge Detector
    Imgproc.Canny(scr, edges, 50, 200, 3, false);

    // Hough Transform
    Transform transf;
    switch (choiceT) {
    case 1:
        transf = new OpenCV();
        break;
    case 2:
        transf = new MyTransform();
        break;
    default:
        throw new IOException("Transform wasn't choiced!!!");
    }

    transf.houghTransform(edges);
    scr = transf.drawLines(scr);

    PictureJFrame x = new PictureJFrame(convertMatToBufferedImage(edges), convertMatToBufferedImage(scr));
    x.setVisible(true);

}

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Does the main loop; if we reach the penultimate frame,
 * it means we have reached the end of the video.
 */
public void processVideo() {
    do {
        Mat tmp = new Mat();
        video.read(tmp);
        if (!tmp.empty()) {
            frame = tmp.clone();
            tmp.release();
            if (frameCounter < (getFrameCount() / 2) - 1) {
                frameCounter++;
                if (getMinutes() > 0) {
                    carsPerMinute = getDetectedCarsCount() / getMinutes();
                }

                processFrame(getFrame());
            } else {
                frameCounter = 0;
                finished = true;

                logger.trace("Restarting..");
                setFramePos(1);
            }
        } else {
            logger.warn("Empty image!");
            frameCounter = 0;
            finished = true;

            logger.trace("Restarting..");
            setFramePos(1);
        }
    } while (frameCounter > (getFrameCount() / 2) - 2);
}

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Returns an {@code Image}, converted from a {@code Mat}.
 *
 * @param frameToConvert   The frame to be converted to an {@code Image}
 * @return   The {@code Image}, converted from a {@code Mat}
 */
public Image convertCvMatToImage(Mat frameToConvert) {
    if (!buffer.empty()) {
        buffer.release();
    }
    try {
        Imgproc.resize(frameToConvert, frameToConvert, frameSize);
        Imgcodecs.imencode(".jpg", frameToConvert, buffer, params);
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
    fxImage = new Image(new ByteArrayInputStream(buffer.toArray()));
    if (!frameToConvert.empty()) {
        frameToConvert.release();
    }

    return fxImage;
}

From source file:interactivespaces.service.image.vision.opencv.OpenCvVideoLoop.java

License:Apache License

@Override
protected void loop() throws InterruptedException {
    Mat frame = new Mat();
    capture.grab();
    capture.retrieve(frame);
    if (frame.empty()) {
        log.warn("No image");
        return;
    }

    notifyListenersNewVideoFrame(frame);
}

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

/**
 * Compares two valid java bitmaps and calculates similarity score between them.
 *
 * @param refImage   reference image
 * @param tplImage   template
 * @param resizeMode one of possible enum values. Set it either to <em>TEMPLATE_TO_REFERENCE_RESOLUTION</em> or
 *                   <em>REFERENCE_TO_TEMPLATE_RESOLUTION</em> if given bitmaps have different dimensions
 * @return similarity score value in range [-1.0, 1.0]; 1.0 is returned if the images are equal
 * @throws ScreenshotComparisonError if provided images are not valid or have
 *                                   different resolution, but resizeMode has been set to <em>NO_RESIZE</em>
 */
public static double getOverlapScore(BufferedImage refImage, BufferedImage tplImage, ResizeMode resizeMode) {
    Mat ref = prepareImageForComparison(refImage);
    if (ref.empty()) {
        throw new ScreenshotComparisonError("Reference image cannot be converted for further comparison");
    }
    Mat tpl = prepareImageForComparison(tplImage);
    if (tpl.empty()) {
        throw new ScreenshotComparisonError("Template image cannot be converted for further comparison");
    }
    switch (resizeMode) {
    case TEMPLATE_TO_REFERENCE_RESOLUTION:
        tpl = resizeFirstMatrixToSecondMatrixResolution(tpl, ref);
        break;
    case REFERENCE_TO_TEMPLATE_RESOLUTION:
        ref = resizeFirstMatrixToSecondMatrixResolution(ref, tpl);
        break;
    default:
        // do nothing
    }

    if (ref.width() != tpl.width() || ref.height() != tpl.height()) {
        throw new ScreenshotComparisonError(
                "Resolutions of template and reference images are expected to be equal. "
                        + "Try different resizeMode value.");
    }

    Mat res = new Mat(ref.rows() - tpl.rows() + 1, ref.cols() - tpl.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(ref, tpl, res, Imgproc.TM_CCOEFF_NORMED);
    return Core.minMaxLoc(res).maxVal;
}
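
A brief usage sketch for the method above; the image file names and the chosen ResizeMode are illustrative assumptions, and the demo is assumed to sit next to getOverlapScore so that the method and the ResizeMode enum resolve:

import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;

public class OverlapScoreDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical inputs: a stored reference screenshot and a fresh capture.
        BufferedImage reference = ImageIO.read(new File("reference.png"));
        BufferedImage current = ImageIO.read(new File("current.png"));

        // Scale the template to the reference resolution before matching,
        // then print the normalized correlation score in [-1.0, 1.0].
        double score = getOverlapScore(reference, current, ResizeMode.TEMPLATE_TO_REFERENCE_RESOLUTION);
        System.out.println("Similarity score: " + score);
    }
}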

From source file:javacv.JavaCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");
    //CascadeClassifier faceDetector = new CascadeClassifier();

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);
    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture;
    capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                //System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
    }
    return;

}

From source file:logic.imageloader.USBCamera.java

@Override
public Mat loadImage() {
    Mat frame = new Mat();

    if (isByKey) {
        KeyBoardListener kbl = new KeyBoardListener();
        imShowOrig.Window.addKeyListener(kbl);

        while (!kbl.isAPressed) {
            capture.retrieve(frame);

            if (!frame.empty())
                imShowOrig.showImage(frame);
        }
    } else
        capture.retrieve(frame);

    return frame;
}

From source file:Main.Camera.CameraController.java

private Mat grabFrame() {
    // init everything
    Mat frame = new Mat();

    // check if the capture is open
    if (this.capture.isOpened()) {
        try {
            // read the current frame
            this.capture.read(frame);
            // if the frame is not empty, process it
            if (!frame.empty()) {
                Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2GRAY);
            }
        } catch (Exception e) {
            // log the error
            System.err.println("Exception during the image elaboration: " + e);
        }
    }
    return frame;
}