Example usage for org.opencv.core.Mat Mat()

List of usage examples for org.opencv.core.Mat Mat()

Introduction

On this page you can find example usage for org.opencv.core.Mat Mat(), the default constructor. It creates an empty matrix header with no allocated data; the matrix is typically sized and filled later by the OpenCV routine that receives it as an output argument.

Prototype

public Mat() 

Source Link

Usage
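
Before the project examples, a minimal sketch of the pattern most of them share: a Mat created with the default constructor holds no data and is handed to an OpenCV call as an output argument, which sizes and fills it. This snippet is illustrative rather than taken from any project below, and it assumes the OpenCV native library has already been loaded.

public static Mat toGray(Mat input) {
    // An empty Mat: no rows, no columns, no data allocated yet.
    Mat gray = new Mat();
    // cvtColor allocates 'gray' to match the input size and the target type.
    Imgproc.cvtColor(input, gray, Imgproc.COLOR_BGR2GRAY);
    return gray;
}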

From source file:eu.fpetersen.robobrain.behavior.followobject.OrbObjectDetector.java

License:Open Source License

public void process(Mat image) {
    Mat tempImage = new Mat();
    Imgproc.cvtColor(image, tempImage, Imgproc.COLOR_RGBA2RGB);
    MatOfKeyPoint keypoints = detectInImage(tempImage);
    Mat descriptors = extractDescriptors(keypoints, tempImage);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors, originalDescriptors, matches);

    KeyPoint[] keypointArray = keypoints.toArray();
    KeyPoint[] originalKeypointArray = originalKeypoints.toArray();

    // Find the smallest and largest match distances (seeded with heuristic bounds).
    float min = 40.0f;
    float max = 1000.0f;
    for (DMatch match : matches.toList()) {
        if (match.distance < min) {
            min = match.distance;
        } else if (match.distance > max) {
            max = match.distance;
        }
    }

    float threshold = 1.5f * min;
    List<KeyPoint> matchedKeyPoints = new ArrayList<KeyPoint>();
    List<Point> matchedPoints = new ArrayList<Point>();
    List<Point> matchedOriginalPoints = new ArrayList<Point>();
    for (DMatch match : matches.toList()) {
        if (match.distance < threshold) {
            KeyPoint matchedKeypoint = keypointArray[match.queryIdx];
            matchedKeyPoints.add(matchedKeypoint);
            matchedPoints.add(matchedKeypoint.pt);

            KeyPoint matchedOriginalKeypoint = originalKeypointArray[match.trainIdx];
            matchedOriginalPoints.add(matchedOriginalKeypoint.pt);
        }
    }

    if (matchedKeyPoints.size() > 10) {

        Mat H = Calib3d.findHomography(
                new MatOfPoint2f(matchedOriginalPoints.toArray(new Point[matchedOriginalPoints.size()])),
                new MatOfPoint2f(matchedPoints.toArray(new Point[matchedPoints.size()])), Calib3d.RANSAC, 10);

        List<Point> originalCorners = new ArrayList<Point>();
        originalCorners.add(new Point(0, 0));
        originalCorners.add(new Point(originalImage.cols(), 0));
        originalCorners.add(new Point(originalImage.cols(), originalImage.rows()));
        originalCorners.add(new Point(0, originalImage.rows()));

        List<Point> corners = new ArrayList<Point>();
        for (int i = 0; i < 4; i++) {
            corners.add(new Point(0, 0));
        }
        Mat objectCorners = Converters.vector_Point2f_to_Mat(corners);

        Core.perspectiveTransform(Converters.vector_Point2f_to_Mat(originalCorners), objectCorners, H);
        corners.clear();
        Converters.Mat_to_vector_Point2f(objectCorners, corners);

        Core.line(tempImage, corners.get(0), corners.get(1), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(1), corners.get(2), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(2), corners.get(3), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(3), corners.get(0), new Scalar(0, 255, 0), 4);
    }

    Features2d.drawKeypoints(tempImage,
            new MatOfKeyPoint(matchedKeyPoints.toArray(new KeyPoint[matchedKeyPoints.size()])), tempImage);
    Imgproc.cvtColor(tempImage, image, Imgproc.COLOR_RGB2RGBA);

}
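
The loop above keeps only matches whose distance falls below 1.5x the best (smallest) distance. That filtering step can be isolated into a small helper; a minimal sketch, illustrative rather than taken from the project:

static List<DMatch> goodMatches(MatOfDMatch matches) {
    List<DMatch> all = matches.toList();
    // Find the best (smallest) distance among all matches.
    float min = Float.MAX_VALUE;
    for (DMatch m : all) {
        min = Math.min(min, m.distance);
    }
    // Keep only matches within 1.5x of the best distance.
    float threshold = 1.5f * min;
    List<DMatch> good = new ArrayList<DMatch>();
    for (DMatch m : all) {
        if (m.distance < threshold) {
            good.add(m);
        }
    }
    return good;
}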

From source file:FaceRecog.App.java

private void runMainLoop(String[] args) {
    ImageProcessor imageProcessor = new ImageProcessor();
    Mat webcamMatImage = new Mat();
    Image tempImage;
    VideoCapture capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 320);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 240);
    if (capture.isOpened()) {
        while (true) {
            capture.read(webcamMatImage);
            if (!webcamMatImage.empty()) {
                tempImage = imageProcessor.toBufferedImage(webcamMatImage);
                ImageIcon imageIcon = new ImageIcon(tempImage, "Captured video");
                imageLabel.setIcon(imageIcon);
                frame.pack(); //this will resize the window to fit the image
            } else {
                System.out.println(" -- Frame not captured -- Break!");
                break;
            }
        }
    } else {
        System.out.println("Couldn't open capture.");
    }
}

From source file:fi.conf.tabare.ARDataProvider.java

private void detect() {

    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;

    //List<Mat> channels = new LinkedList<>();

    //Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);

                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);

                //Imgproc.equalizeHist(input_image, input_image);

                input_image.convertTo(input_image, -1, params.contrast, params.brightness); //image*contrast[1.0-3.0] + brightness[0-255]

                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {

                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image.empty()) // lazily size the buffer; the Mat constructed above is empty, never null
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);

                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();

                }

                //               if(background == null) background = input_image.clone();         
                //               if(recaptureBg){
                //                  backgSubstractor.apply(background, background);
                //                  System.out.println(background.channels() + " " + background.size() );
                //                  System.out.println(input_image.channels() + " " + input_image.size() );
                //                  recaptureBg = false;
                //               }
                //               if(dynamicBGRemoval){
                //                  //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //                  //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //                  //Core.subtract(input_image, background, input_image);
                //                  //Core.bitwise_xor(input_image, background, input_image);
                //
                //                  doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get a nicer result when subtracting
                //                  backgSubstractor.apply(background, background, dynamicBGAmount);
                //               }
                //               if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;

                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;

                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {

                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV
                                            : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {

                        double[] coords = circles.get(0, i);

                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; // Skip circles that are out of bounds or too small.

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);

                        if (tc.isValid())
                            tripcodeData.add(tc);

                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();

                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }

            } else {
                System.out.println("frame/cam failure!");
            }

        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        //FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }

    }

    //De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}

From source file:formularios.FrmCamera.java

private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
    // TODO add your handling code here:
    System.out.println("Hello, OpenCV");
    // Load the native library.
    System.out.println(System.getProperty("java.library.path"));
    System.loadLibrary("opencv-300");

    VideoCapture camera = new VideoCapture(0);
    camera.open(0); //Useless
    if (!camera.isOpened()) {
        System.out.println("Camera Error");
    } else {
        System.out.println("Camera OK?");
    }

    Mat frame = new Mat();

    //camera.grab();
    //System.out.println("Frame Grabbed");
    //camera.retrieve(frame);
    //System.out.println("Frame Decoded");

    camera.read(frame);
    System.out.println("Frame Obtained");

    /* No difference
    camera.release();
    */

    System.out.println("Captured Frame Width " + frame.width());

    Imgcodecs.imwrite("camera.jpg", frame);
    System.out.println("OK");

}

From source file:fr.olympicinsa.riocognized.facedetector.tools.Treatment.java

/**
 * Images should be resized and grayscaled before computing eigenfaces; then
 * the histogram is equalized.
 *
 * @param image Mat of the image to treat
 * @return Mat of the grayscale, histogram-equalized image
 */
public static Mat beforeDetection(Mat image) {
    Mat gray = new Mat();
    cvtColor(image, gray, COLOR_BGR2GRAY);
    equalizeHist(gray, gray);
    return gray;
}

From source file:fr.olympicinsa.riocognized.facedetector.tools.Treatment.java

public static Mat resize(Mat image, int maxW) {
    Mat resizeImage = new Mat();
    log.info("Image size: " + image.width() + "," + image.height());
    double f = (double) image.width() / (double) maxW;
    log.info("Resize factor : " + f);
    int w = (int) (image.width() / f);
    int h = (int) (image.height() / f);
    Size sz = new Size(w, h);
    log.info("Resizing image to : " + w + "," + h);
    Imgproc.resize(image, resizeImage, sz);
    return resizeImage;
}
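
As the javadoc above suggests, the two helpers chain naturally: resize first, then grayscale and equalize. A short usage sketch; the file name is hypothetical:

Mat raw = Imgcodecs.imread("face.jpg");       // hypothetical input image
Mat small = Treatment.resize(raw, 320);       // cap the width at 320 px
Mat ready = Treatment.beforeDetection(small); // grayscale, equalized histogram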

From source file:frclib.FrcOpenCVDetector.java

License:Open Source License

/**
 * Constructor: Create an instance of the object.
 *
 * @param instanceName specifies the instance name.
 * @param videoIn specifies the video input stream.
 * @param videoOut specifies the video output stream.
 * @param numImageBuffers specifies the number of image buffers to allocate.
 * @param detectedObjectBuffers specifies the array of preallocated detected object buffers.
 */
public FrcOpenCVDetector(final String instanceName, CvSink videoIn, CvSource videoOut, int numImageBuffers,
        O[] detectedObjectBuffers) {
    if (debugEnabled) {
        dbgTrace = new TrcDbgTrace(moduleName, tracingEnabled, traceLevel, msgLevel);
    }

    this.instanceName = instanceName;
    this.videoIn = videoIn;
    this.videoOut = videoOut;
    //
    // Pre-allocate the image buffers.
    //
    Mat[] imageBuffers = new Mat[numImageBuffers];
    for (int i = 0; i < imageBuffers.length; i++) {
        imageBuffers[i] = new Mat();
    }

    visionTask = new TrcVisionTask<>(instanceName, this, imageBuffers, detectedObjectBuffers);
}
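
The constructor pre-allocates the image buffers so the vision task can reuse the same Mat objects frame after frame rather than allocating a new one per frame. A minimal sketch of that reuse pattern, illustrative rather than taken from the project (handleFrame is a hypothetical consumer):

VideoCapture capture = new VideoCapture(0);
Mat[] buffers = new Mat[2];
for (int i = 0; i < buffers.length; i++) {
    buffers[i] = new Mat(); // empty; read() sizes and fills it on first use
}
int next = 0;
while (capture.read(buffers[next])) {
    handleFrame(buffers[next]); // hypothetical consumer of the captured frame
    next = (next + 1) % buffers.length;
}
capture.release();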

From source file:fuzzycv.MainFrame.java

private Mat docanny(Mat frame) {
    int treshValue = thresholdSlider.getValue();
    Mat cvtImg = new Mat();
    Mat detectedEdges = new Mat();

    Imgproc.cvtColor(frame, cvtImg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.blur(cvtImg, detectedEdges, new Size(3.0, 3.0));

    Imgproc.Canny(detectedEdges, detectedEdges, treshValue, treshValue * 3, 3, false);

    Mat mask = new Mat();
    Core.add(mask, Scalar.all(0), mask);
    frame.copyTo(mask, detectedEdges);

    return mask;
}

From source file:fuzzycv.MainFrame.java

private Mat removeBG(Mat frame) {

    Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();

    //threshold the image with the histogram average value
    hsvImg.create(frame.size(), CvType.CV_8U);
    Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);

    double threshValue = getHistoAvg(hsvImg, hsvPlanes.get(0));

    // Hue values in an 8-bit HSV image span 0-179, hence the 179.0 maximum below.
    if (inverseCheckBox.isSelected()) {
        Imgproc.threshold(hsvPlanes.get(0), thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY_INV);
    } else {
        Imgproc.threshold(hsvPlanes.get(0), thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);
    }

    Imgproc.blur(thresholdImg, thresholdImg, new Size(5, 5));

    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, 1), 6);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, 1), 6);

    Imgproc.threshold(thresholdImg, thresholdImg, threshValue, 179.0, Imgproc.THRESH_BINARY);

    // create the new image
    Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    frame.copyTo(foreground, thresholdImg);

    return foreground;
}

From source file:fuzzycv.MainFrame.java

private Mat findAndDrawCrust(Mat maskedImage, Mat frame) {

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(maskedImage, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    //if any contour exist...
    if (hierarchy.size().height > 0 && hierarchy.size().width > 0) {
        //for each contour, display it in blue
        for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) {
            Imgproc.drawContours(frame, contours, idx, new Scalar(160, 0, 0));
        }
    }

    return frame;
}
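
A note on the loop header above: it walks OpenCV's contour hierarchy, where hierarchy.get(0, idx)[0] holds the index of the next contour at the same level, or -1 when there is none. When hierarchy does not matter and every contour should be drawn, an equivalent and simpler call is (a sketch, not from the project):

// A negative contour index tells drawContours to draw all contours.
Imgproc.drawContours(frame, contours, -1, new Scalar(160, 0, 0));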