Example usage for org.opencv.core.Mat Mat()

Introduction

This page collects usage examples for the default constructor of org.opencv.core.Mat, Mat().

Prototype

public Mat() 
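
The no-argument constructor creates an empty Mat header with no allocated data: rows() and cols() are 0 and empty() returns true. OpenCV sizes and fills such a Mat the first time it is passed as an output argument, which is the pattern most of the examples below rely on. The following minimal sketch illustrates this; it is not taken from the examples on this page, it assumes the OpenCV Java bindings are on the classpath with the native library available, and the class name MatDefaultConstructorDemo is made up for illustration.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatDefaultConstructorDemo {
    public static void main(String[] args) {
        // Load the OpenCV native library before creating any Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Mat() allocates only a header: no rows, no columns, no data.
        Mat result = new Mat();
        System.out.println("empty before use: " + result.empty());

        // Passing the empty Mat as an output argument lets OpenCV size and fill it.
        Mat source = new Mat(3, 3, CvType.CV_8UC1, new Scalar(7));
        Core.add(source, new Scalar(1), result); // result is reallocated to match source
        System.out.println("result = " + result.dump());
    }
}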

Usage

From source file:intrudersrecorder.MainFXMLDocumentController.java

@Override
public void initialize(URL url, ResourceBundle rb) {
    vidCap = new VideoCapture();

    previousMatrix = new Mat();
    subtractionResult = new Mat();
    col = new ArrayList<>();

    resolutionWidth = 100;
    resolutionHeight = 100;

    sliderInit();
    resSliderListener();
    pixDiffSliderListener();
    objSizeSliderListener();

    cameraSwitch.setStyle("-fx-background-color: red");
}

From source file:intrudersrecorder.MainFXMLDocumentController.java

private Image grabFrame() {
    inputImage = null;
    singleFrame = new Mat();
    if (vidCap.isOpened()) {
        try {
            vidCap.read(singleFrame);
            Imgproc.cvtColor(singleFrame, singleFrame, Imgproc.COLOR_BGR2GRAY);
            if (!hasPrevious) {
                MatOfByte buff = new MatOfByte();
                Imgcodecs.imencode(".jpg", singleFrame, buff);
                inputImage = new Image(new ByteArrayInputStream(buff.toArray()));
            }
        } catch (Exception e) {
            System.out.println("problem in reading frame");
        }
    }
    if (hasPrevious) {
        // pixel-wise absolute difference between the current and previous frame
        Core.absdiff(singleFrame, previousMatrix, subtractionResult);
        hi = singleFrame.rows();
        wi = singleFrame.cols();
        col.clear();
        // threshold the difference image and collect the coordinates of changed pixels
        for (int r = 0; r < hi; r++) {
            for (int c = 0; c < wi; c++) {
                //                    double v1[] = singleFrame.get(r, c);
                //                    double v2[] = prevMat.get(r, c);
                //                    double diff = Math.abs(v1[0] - v2[0]);
                previousMatrix.put(r, c, singleFrame.get(r, c)[0]);
                if (subtractionResult.get(r, c)[0] >= minPixelDiff) {
                    subtractionResult.put(r, c, 255);
                    col.add(new Coordinates(r, c));
                } else {
                    subtractionResult.put(r, c, 0);
                }
            }
        }

        // flood-fill each changed region; regions above the size threshold are drawn as detections
        boolean ok = true;
        int sz = col.size();
        for (int i = 0; i < sz; i++) {
            double v[] = subtractionResult.get(col.get(i).getR(), col.get(i).getC());
            if (v[0] == 255) {
                topLeftR = 2 * hi;
                topLeftC = 2 * wi;
                bottomRightR = -1;
                bottomRightC = -1;
                cnt = 1;
                floodFill(col.get(i).getR(), col.get(i).getC());
                if (cnt >= minPixelsOfAnObject) {
                    //                      System.out.println("burglur........"+cnt);
                    drawBorder();
                }
            }
        }

        // using non recursive flood fill

        //            for(int i = 0; i < sz; i++){
        //                topLeftR = 2 * hi;
        //                topLeftC = 2 * wi;
        //                bottomRightR = -1;
        //                bottomRightC = -1;
        //                cnt = 1;
        //                Coordinates c = new Coordinates(col.get(i).getR(), col.get(i).getC());
        //                if(subsRes.get(c.getR(), c.getC())[0] == 255){
        //                    nonRecursiveFloodfill(c);
        //                    if(cnt >= minPixelsOfAnObject){
        //                        System.out.println("burglur........"+cnt);
        //                        //floodOne(r, c);
        //                        //drawBorder();
        //                        //ok = false;
        //                    }
        //                }
        //            }

        //            System.out.println("h: "+subsRes.rows() + " w "+subsRes.cols());
        MatOfByte buff = new MatOfByte();
        Imgcodecs.imencode(".jpg", singleFrame, buff);
        inputImage = new Image(new ByteArrayInputStream(buff.toArray()));
    } else {
        //System.out.println("first frame");
        hasPrevious = true;
        previousMatrix = singleFrame.clone();
    }
    return inputImage;
}

From source file:io.appium.java_client.ScreenshotState.java

License:Apache License

private static Mat resizeFirstMatrixToSecondMatrixResolution(Mat first, Mat second) {
    if (first.width() != second.width() || first.height() != second.height()) {
        final Mat result = new Mat();
        final Size sz = new Size(second.width(), second.height());
        Imgproc.resize(first, result, sz);
        return result;
    }
    return first;
}

From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java

License:Open Source License

private static void capture() {
    File classifier = new File("lbpcascade_frontalface_improved.xml");

    if (!classifier.exists()) {
        displayFatalError("Unable to find classifier!");
        return;
    }

    CascadeClassifier faceDetector = new CascadeClassifier(classifier.toString());
    VideoCapture camera = new VideoCapture(0);

    if (!camera.isOpened()) {
        displayFatalError("No camera detected!");
        return;
    }

    if (!DATABASE.exists())
        DATABASE.mkdir();

    ImageFrame frame = new ImageFrame();

    while (frame.isOpen() && camera.isOpened()) {
        Mat rawImage = new Mat();
        camera.read(rawImage);
        Mat newImage = detectFaces(rawImage, faceDetector, frame);
        frame.showImage(newImage);
    }

    camera.release();
}

From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java

License:Open Source License

private static int compareFaces(Mat currentImage, String fileName) {
    Mat compareImage = Imgcodecs.imread(fileName);
    ORB orb = ORB.create();
    int similarity = 0;

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    orb.detect(currentImage, keypoints1);
    orb.detect(compareImage, keypoints2);

    Mat descriptors1 = new Mat();
    Mat descriptors2 = new Mat();
    orb.compute(currentImage, keypoints1, descriptors1);
    orb.compute(compareImage, keypoints2, descriptors2);

    // count descriptor matches within a Hamming distance of 50 as evidence of similarity
    if (descriptors1.cols() == descriptors2.cols()) {
        MatOfDMatch matchMatrix = new MatOfDMatch();
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        matcher.match(descriptors1, descriptors2, matchMatrix);
        DMatch[] matches = matchMatrix.toArray();

        for (DMatch match : matches)
            if (match.distance <= 50)
                similarity++;
    }

    return similarity;
}

From source file:jarvis.module.colourtracking.ColourTrackingModule.java

public ColourTrackingModule() {
    System.out.println("Initialising colour tracking module...");

    // Load native library
    System.loadLibrary("opencv_java2410");

    // Initialise webcam
    cap = new VideoCapture(0);

    // Initialise window
    frame = new JFrame("untitled");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(640, 480 + 50);
    panel = new Panel();
    frame.setContentPane(panel);
    lowH = new JSlider(JSlider.HORIZONTAL, 0, 179, 0);
    panel.add(lowH);
    lowS = new JSlider(JSlider.HORIZONTAL, 0, 255, 0);
    panel.add(lowS);
    lowV = new JSlider(JSlider.HORIZONTAL, 0, 255, 0);
    panel.add(lowV);
    highH = new JSlider(JSlider.HORIZONTAL, 0, 179, 0);
    panel.add(highH);
    highS = new JSlider(JSlider.HORIZONTAL, 0, 255, 0);
    panel.add(highS);
    highV = new JSlider(JSlider.HORIZONTAL, 0, 255, 0);
    panel.add(highV);
    frame.setVisible(true);
    running = true;

    imageOriginal = new Mat();
    imageHSV = new Mat();
    imageThresholded = new Mat();
    mat2image = new Mat2Image();

    System.out.println("done");
}

From source file:javaapplication1.Ocv.java

public void blendWithGray50(String input, String output) {
    // load the image and read it into a matrix
    Mat image = Highgui.imread(input);

    // clone the image, and convert it to grayscale
    Mat gray = image.clone();
    Imgproc.cvtColor(gray, gray, Imgproc.COLOR_BGR2GRAY, 1);
    Imgproc.cvtColor(gray, gray, Imgproc.COLOR_GRAY2BGR, 3);

    // blend the two images (equal weight) into a new matrix and save it
    Mat dst = new Mat();
    Core.addWeighted(image, .5f, gray, .5f, 0.0, dst);
    Highgui.imwrite(output, dst);
}

From source file:javaapplication1.Ocv.java

void doSobel(String input, String output) {
    // load the image and read it into a matrix
    Mat image = Highgui.imread(input);

    // blur the image
    Imgproc.GaussianBlur(image, image, new Size(3, 3), 0, 0, Imgproc.BORDER_DEFAULT);

    // convert the image to gray
    Mat gray = new Mat();
    Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);

    // parameters for our conversion
    int scale = 1;
    int delta = 0;
    int ddepth = CvType.CV_16S;

    // figure out the X gradient, get its absolute value
    Mat gx = new Mat();
    Imgproc.Sobel(gray, gx, ddepth, 1, 0, 3, scale, delta, Imgproc.BORDER_DEFAULT);
    Mat abs_gx = new Mat();
    Core.convertScaleAbs(gx, abs_gx);

    // do the same for Y
    Mat gy = new Mat();
    Imgproc.Sobel(gray, gy, ddepth, 0, 1, 3, scale, delta, Imgproc.BORDER_DEFAULT);
    Mat abs_gy = new Mat();
    Core.convertScaleAbs(gy, abs_gy);

    // do a simple combine, and save it
    Mat grad = new Mat();
    Core.addWeighted(abs_gx, 0.5, abs_gy, 0.5, 0, grad);
    Highgui.imwrite(output, grad);
}

From source file:javacv.JavaCV.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    // TODO code application logic here

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");
    //CascadeClassifier faceDetector = new CascadeClassifier();

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);
    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture;
    capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                //System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
    }
    return;

}

From source file:javafx1.JavaFX1.java

private Mat doCanny(Mat frame) {
    // init
    Mat grayImage = new Mat();
    Mat detectedEdges = new Mat();

    // convert to grayscale
    Imgproc.cvtColor(frame, grayImage, Imgproc.COLOR_BGR2GRAY);

    // reduce noise with a 2x2 blur kernel
    Imgproc.blur(grayImage, detectedEdges, new Size(2, 2));

    // canny detector, with ratio of lower:upper threshold of 3:1
    // Imgproc.Canny(detectedEdges, detectedEdges, 2, 2 * 3);
    // using Canny's output as a mask, display the result
    Mat dest = new Mat();
    frame.copyTo(dest, detectedEdges);

    return dest;
}