Example usage for org.opencv.core Mat Mat

Introduction

This page collects usage examples for the org.opencv.core Mat() default constructor.

Prototype

public Mat() 
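
The default constructor creates an empty matrix (no rows, no columns, no data); it is typically passed to a call such as VideoCapture.read() or Imgproc.cvtColor(), which allocates it to the required size. The following minimal sketch illustrates that pattern; it is not taken from the sources listed under Usage, and it assumes the OpenCV Java bindings are on the classpath with the native library loadable via Core.NATIVE_LIBRARY_NAME (the org.opencv.videoio import path is the OpenCV 3.x layout; 2.4 uses org.opencv.highgui.VideoCapture).

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;

public class MatDefaultConstructorSketch {
    public static void main(String[] args) {
        // Load the native OpenCV library before any Mat/VideoCapture call.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat frame = new Mat();              // empty Mat: 0 rows, 0 cols, no data yet
        System.out.println(frame.empty());  // prints "true"

        VideoCapture camera = new VideoCapture(0); // hypothetical camera at index 0
        if (camera.read(frame)) {
            // read() allocated the Mat to match the captured frame
            System.out.println(frame.rows() + "x" + frame.cols()
                    + ", channels=" + frame.channels());
        }
        camera.release();
    }
}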

Usage

From source file:attendance_system_adder.video.java
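A capture loop that reads camera frames into a Mat created with the default constructor, converts each frame to grayscale, and displays it until the window is flagged closed.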

public synchronized void displayVideo(int index) {
    isdisplayClose = false;
    if (vc == null) {
        vc = new VideoCapture(index);
    }
    if (img == null) {
        img = new image();
    }
    if (mat == null) {
        mat = new Mat();
    }

    vdf.setVisible(true);

    while (true) {
        if (isdisplayClose) {
            img.closeDisplayFrame();
            vc.release();
            break;
        }
        vc.read(mat);
        grayMat = img.RGBtoGRAY(mat);
        Mat2BufferedImage = img.Mat2BufferedImage(grayMat);
        img.displayImage(Mat2BufferedImage);

    }
}

From source file:attendance_system_adder.video.java
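The same capture loop, but each grayscale frame is handed to a face-detection display routine instead of being converted to a BufferedImage.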

public void faceDetctDisplayVideo(int index) {
    isdisplayClose = false;
    if (vc == null) {
        vc = new VideoCapture(index);
    }
    if (img == null) {
        img = new image();
    }
    if (mat == null) {
        mat = new Mat();
    }

    vdf.setVisible(true);

    while (true) {
        if (isdisplayClose) {
            img.closeDisplayFrame();
            vc.release();
            break;
        }
        vc.read(mat);
        grayMat = img.RGBtoGRAY(mat);
        //            Mat2BufferedImage= img.Mat2BufferedImage(grayMat);
        img.faceDetctDisplayImage(grayMat);

        //            try {
        //                this.sleep(50);
        //            } catch (InterruptedException ex) {
        //                Logger.getLogger(video.class.getName()).log(Level.SEVERE, null, ex);
        //            }

    }
}

From source file:balldetection.BallDetection.java
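Initializes several empty Mat objects (circles, gray, hsv, filter, dst), then thresholds each camera frame in HSV and runs HoughCircles to locate and outline a ball in the live feed.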

/**
 * @param args the command line arguments
 * @throws java.io.IOException
 */
public static void main(String[] args) throws IOException {

    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html 
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
            | javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(CameraWindow.class.getName()).log(java.util.logging.Level.SEVERE,
                null, ex);
    }
    //</editor-fold>

    CameraWindow cWindow = new CameraWindow();
    cWindow.setVisible(true);

    int radius = 0;
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    //intialization of matrices
    Mat circles = new Mat();
    gray = new Mat();
    hsv = new Mat();
    filter = new Mat();
    dst = new Mat();

    camera = new VideoCapture(0);
    Mat frame = new Mat();
    Webcam.ImagePanel panel = Webcam.createPanel(camera, "src");
    Webcam.ImagePanel panel2 = Webcam.createPanel(camera, "filter");
    Webcam.ImagePanel panel3 = Webcam.createPanel(camera, "dst");

    while (true) {

        camera.read(frame);
        src = frame;

        GaussianBlur(src, src, new Size(3, 3), 2, 2);
        Imgproc.cvtColor(src, hsv, Imgproc.COLOR_BGR2HSV);
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);

        Core.inRange(gray, new Scalar(20, 100, 100), new Scalar(30, 255, 255), gray);
        Core.inRange(hsv, new Scalar(cWindow.get_hLower(), cWindow.get_sLower(), cWindow.get_vLower()),
                new Scalar(cWindow.get_hUpper(), cWindow.get_sUpper(), cWindow.get_vUpper()), filter);

        Core.inRange(src, new Scalar(cWindow.get_hLower(), cWindow.get_sLower(), cWindow.get_vLower()),
                new Scalar(cWindow.get_hUpper(), cWindow.get_sUpper(), cWindow.get_vUpper()), dst);

        double[] temp = hsv.get(hsv.rows() / 2, hsv.cols() / 2);
        System.out.println(temp[0] + ", " + temp[1] + ", " + temp[2] + ", " + radius);
        //System.out.println("Current Distance from ball: " + ((2.5366*radius) - 123.02));

        Imgproc.HoughCircles(filter, circles, CV_HOUGH_GRADIENT, cWindow.get_dp(), filter.rows() / 2,
                cWindow.get_param1(), cWindow.get_param2(), cWindow.get_minCircleSize(),
                cWindow.get_maxCircleSize());

        for (int i = 0; i < circles.cols(); i++) {
            Point center = new Point(Math.round(circles.get(0, i)[0]), Math.round(circles.get(0, i)[1]));
            radius = (int) Math.round(circles.get(0, i)[2]);
            // draw the circle center
            Core.circle(src, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
            // draw the circle outline
            Core.circle(src, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
            //System.out.println("" + circles.get(0,0)[0] + ", " + circles.get(0,0)[1] + ", " + circles.get(0,0)[2]);
        }

        panel.updateImage(toBufferedImage(src));
        panel2.updateImage(toBufferedImage(filter));
        panel3.updateImage(toBufferedImage(dst));
    }
}

From source file:balldetection.BallDetection.java
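Writes one of the detection matrices (src, dst, hsv, or the filter image) to a timestamped JPEG screenshot.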

public static void takeScreenshot(int mat) {
    Date tempDate = new Date();
    Mat matFrame = new Mat();

    switch (mat) {
    case 1:
        matFrame = src;
        break;
    case 2:
        matFrame = dst;
        break;
    case 3:
        matFrame = hsv;
        break;
    default:
        matFrame = filter;
        break;
    }

    //camera.read(matFrame);
    Highgui.imwrite("screenshots\\ Screenshot " + counter + " -"
            + String.format("%1$s %2$tb %2$td at %2$tH %2$tM %2$tS", "", tempDate) + ".jpeg", matFrame);
    counter++;
}

From source file:balldetection.Webcam.java
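Converts a Mat to a BufferedImage, first converting BGR to RGB through a temporary Mat when the input has more than one channel.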

public static Image toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        Mat m2 = new Mat();
        Imgproc.cvtColor(m, m2, Imgproc.COLOR_BGR2RGB);
        type = BufferedImage.TYPE_3BYTE_BGR;
        m = m2;
    }
    byte[] b = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    image.getRaster().setDataElements(0, 0, m.cols(), m.rows(), b);
    return image;
}

From source file:balldetection.Webcam.java
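Continuously reads camera frames into a default-constructed Mat and shows them in an ImagePanel.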

public static void main(String[] args) throws Exception {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //  VideoCapture camera = new VideoCapture(0);
    Mat frame = new Mat();
    ImagePanel panel = createPanel(camera, "camera");
    while (true) {
        camera.read(frame);
        panel.updateImage(toBufferedImage(frame));
    }
}

From source file:balldetection.Webcam.java
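Reads a single frame into a new Mat and writes it to disk with Highgui.imwrite.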

public static void takeScreenshot() {
    Mat matFrame = new Mat();
    camera.read(matFrame);
    Highgui.imwrite("screenshots\\screenshot " + counter + ".jpeg", matFrame);
    counter++;
}

From source file:bikecalibration.fxml.controller.MainWindowController.java
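Plays a video forward from the given frame index, reading each frame into a reusable Mat, pushing UI updates onto the JavaFX application thread, and sleeping to match the video's FPS.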

private void forwardPlayVideo(int start) throws Exception {
    LOGGER.log(Level.INFO, "forwardPlayVideo Started");
    cap.set(Videoio.CAP_PROP_POS_FRAMES, start);
    Mat frame = new Mat();
    int frameCounter = start;
    while (cap.read(frame)) {
        if (!videoForwardPlayProperty.get()) {
            Thread.currentThread().interrupt();
            break;
        }
        long startMillis = System.currentTimeMillis();
        final int currentCounter = frameCounter;
        final Image currentImage = Utils.matToImage(frame);
        Platform.runLater(() -> {
            videoSlider.setValue(currentCounter);
        });
        Platform.runLater(() -> {
            drawImage(currentImage);
        });
        currentVideoFrameNumberProperty.set(currentCounter);
        long endMillis = System.currentTimeMillis();
        long sleepMillis = (long) (1000 / videoInfo.FPS.get()) - (endMillis - startMillis);
        if (sleepMillis > 0) {
            try {
                Platform.runLater(() -> {
                    lblStatus.setText(String.format("%d FPS", (long) videoInfo.FPS.get()));
                });
                Thread.sleep(sleepMillis);
            } catch (InterruptedException ex) {
                LOGGER.log(Level.SEVERE, ex.getMessage());
                Thread.currentThread().interrupt();
            }
        } else {
            final long fps = Utils.calculateFPS(startMillis, endMillis, 1);
            Platform.runLater(() -> {
                lblStatus.setText(String.format("%d FPS", fps));
            });
        }

        frameCounter++;
    }
    videoForwardPlayProperty.set(false);
    LOGGER.log(Level.INFO, "forwardPlayVideo Ended");
}

From source file:bikecalibration.fxml.controller.MainWindowController.java
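Plays the video in reverse: after each frame is read into the reusable Mat, the capture position is moved back one frame, with the same UI updates and FPS throttling as the forward case.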

private void reversePlayVideo(int start) throws Exception {
    LOGGER.log(Level.INFO, "reversePlayVideo started.");
    cap.set(Videoio.CAP_PROP_POS_FRAMES, start);
    Mat frame = new Mat();
    int frameCounter = start;
    while (cap.read(frame)) {
        if (!videoBackwardsPlayProperty.get()) {
            break;
        }
        long startMillis = System.currentTimeMillis();
        final int currentCounter = frameCounter;
        final Image currentImage = Utils.matToImage(frame);
        Platform.runLater(() -> {
            videoSlider.setValue(currentCounter);
        });
        Platform.runLater(() -> {
            drawImage(currentImage);
        });
        Platform.runLater(() -> {
            currentFrameNumberField.setText(String.format("%d", currentCounter));
        });
        currentVideoFrameNumberProperty.set(currentCounter);
        long endMillis = System.currentTimeMillis();
        long sleepMillis = (long) (1000 / videoInfo.FPS.get()) - (endMillis - startMillis);
        frameCounter--;
        if (frameCounter >= 0) {
            cap.set(Videoio.CAP_PROP_POS_FRAMES, frameCounter);
        } else {
            break;
        }
        if (sleepMillis > 0) {
            try {
                Platform.runLater(() -> {
                    lblStatus.setText(String.format("%d FPS", (long) videoInfo.FPS.get()));
                });
                Thread.sleep(sleepMillis);
            } catch (InterruptedException ex) {
                LOGGER.log(Level.SEVERE, ex.getMessage());
                Thread.currentThread().interrupt();
            }
        } else {
            final long fps = Utils.calculateFPS(startMillis, endMillis, 1);
            Platform.runLater(() -> {
                lblStatus.setText(String.format("%d FPS", fps));
            });
        }
    }
    videoBackwardsPlayProperty.set(false);
    LOGGER.log(Level.INFO, "reversePlayVideo ended.");
}

From source file:bikecalibration.OpenCvUtils.java
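Seeks the VideoCapture to the requested frame position and reads that single frame into a freshly constructed Mat.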

public static Mat getImageFromVideo(int position, VideoCapture cap) {
    Mat frame = new Mat();
    cap.set(Videoio.CAP_PROP_POS_FRAMES, position);
    cap.read(frame);
    return frame;
}