Example usage for org.opencv.core Mat get

Introduction

On this page you can find example usages of the org.opencv.core.Mat.get method.

Prototype

public double[] get(int row, int col) 
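
get(row, col) returns the element at the given row and column as a double[] holding one value per channel. A minimal sketch, not taken from any of the listings below:

    Mat m = Mat.eye(3, 3, CvType.CV_8UC1);   // 3x3 single-channel identity matrix
    double[] px = m.get(1, 1);               // one entry per channel, here length 1
    System.out.println(px[0]);               // 1.0
    System.out.println(m.get(0, 1)[0]);      // 0.0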

Usage

From source file:edu.sust.cse.util.Histogram.java

public static Mat getHistogram(Mat image) {

    try {
        Mat src = new Mat(image.height(), image.width(), CvType.CV_8UC2);
        Imgproc.cvtColor(image, src, Imgproc.COLOR_RGB2GRAY);
        ArrayList<Mat> bgr_planes = new ArrayList<>();
        Core.split(src, bgr_planes);

        MatOfInt histSize = new MatOfInt(256);

        final MatOfFloat histRange = new MatOfFloat(0f, 256f);

        boolean accumulate = false;

        Mat b_hist = new Mat();

        Imgproc.calcHist(bgr_planes, new MatOfInt(0), new Mat(), b_hist, histSize, histRange, accumulate);

        int hist_w = 512;
        int hist_h = 600;
        long bin_w = Math.round((double) hist_w / 256);

        Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC1);

        Core.normalize(b_hist, b_hist, 3, histImage.rows(), Core.NORM_MINMAX);

        for (int i = 1; i < 256; i++) {

            Core.line(histImage, new Point(bin_w * (i - 1), hist_h - Math.round(b_hist.get(i - 1, 0)[0])),
                    new Point(bin_w * (i), hist_h - Math.round(b_hist.get(i, 0)[0])),
                    new Scalar(255, 0, 0), 2, 8, 0);

        }

        return histImage;
    } catch (Exception ex) {
        System.out.println("[HISTOGRAM][ERROR][" + ex.getMessage() + "]");
        return null;
    }
}
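
Each b_hist.get(i, 0) call in the drawing loop above returns a freshly allocated double[] for a single bin. When many bins are read, the bulk overload get(row, col, float[]) copies them in one call; a short sketch, assuming b_hist is the single-column CV_32F histogram produced by calcHist:

    float[] bins = new float[(int) b_hist.total()];
    b_hist.get(0, 0, bins);   // copies every bin at once; valid because the Mat depth is CV_32F
    // bins[i] now corresponds to b_hist.get(i, 0)[0]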

From source file:emotion.Eye.java

public void examineEyeOpeness(boolean rightEyeFlag) {
    Rect pureEyeRegion;
    //We take just middle half of strict eye region determined
    //by localized eye corners
    if (rightEyeFlag) {
        double regionWidth = EyeRegion.rightOuterEyeCorner.x - EyeRegion.rightInnerEyeCorner.x;
        pureEyeRegion = new Rect((int) (EyeRegion.rightInnerEyeCorner.x + regionWidth / 2 - 2),
                (int) (Eye.rightRect.y), (4), Eye.rightRect.height);
        imwrite("strictEyeRegRight.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.rightLowerEyelid.x = (EyeRegion.rightOuterEyeCorner.x + EyeRegion.rightInnerEyeCorner.x) / 2;
        EyeRegion.rightUpperEyelid.x = EyeRegion.rightLowerEyelid.x;
        EyeRegion.rightEyeOpeness = (EyeRegion.rightUpperEyelid.y - EyeRegion.rightLowerEyelid.y);
    } else {
        double regionWidth;
        regionWidth = EyeRegion.leftInnerEyeCorner.x - EyeRegion.leftOuterEyeCorner.x;
        pureEyeRegion = new Rect((int) (regionWidth / 2 + EyeRegion.leftOuterEyeCorner.x - 2),
                (int) (Eye.leftRect.y), (4), Eye.leftRect.height);
        imwrite("leftEyeReg.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        //Setting x coordinates of eyelids
        EyeRegion.leftLowerEyelid.x = (EyeRegion.leftInnerEyeCorner.x + EyeRegion.leftOuterEyeCorner.x) / 2;
        EyeRegion.leftUpperEyelid.x = EyeRegion.leftLowerEyelid.x;
        EyeRegion.leftEyeOpeness = (EyeRegion.leftUpperEyelid.y - EyeRegion.leftLowerEyelid.y);
    }

    Mat strictEyeRegion = new Mat(EyeRegion._face, pureEyeRegion);
    Mat result = new Mat();

    strictEyeRegion.convertTo(strictEyeRegion, CvType.CV_32F);
    Core.pow(strictEyeRegion, 1.27, strictEyeRegion);
    cvtColor(strictEyeRegion, strictEyeRegion, Imgproc.COLOR_BGR2GRAY);
    imwrite("improved.jpg", strictEyeRegion);

    threshold(strictEyeRegion, result, 100, 255, Imgproc.THRESH_BINARY_INV);

    Mat strEl = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 1));
    dilate(result, result, strEl, new Point(1, 0), 3);

    // zero out the top 40% of the strip before the bottom-up eyelid scan below
    for (int i = 0; i < result.width(); i++) {
        for (int j = 0; j < result.height() * 0.4; j++) {
            result.put(j, i, 0);
        }
    }
    // scan column 0 from the bottom up: the first white pixel found marks the lower eyelid, the last one the upper eyelid
    for (int j = result.height() - 1; j >= 0; j--) {
        if (result.get(j, 0)[0] == 255) {
            if (rightEyeFlag) {

                if (EyeRegion.rightLowerEyelid.y == 0) {
                    EyeRegion.rightLowerEyelid.y = j + 3;
                    EyeRegion.rightLowerEyelid.y += Eye.rightRect.y;
                }
                EyeRegion.rightUpperEyelid.y = j;
                EyeRegion.rightUpperEyelid.y += Eye.rightRect.y;
            } else {
                if (EyeRegion.leftLowerEyelid.y == 0) {
                    EyeRegion.leftLowerEyelid.y = j + 3;
                    EyeRegion.leftLowerEyelid.y += Eye.leftRect.y;
                }
                EyeRegion.leftUpperEyelid.y = j;
                EyeRegion.leftUpperEyelid.y += Eye.leftRect.y;
            }
        }
    }
    imwrite("openessResult.jpg", result);
}

From source file:emotion.EyeRegion.java

public static void areEyebrowsWrinkles() {
    //setting parameters
    int height = (int) (abs(rightInnerEyebrowsCorner.y - rightInnerEyeCorner.y) * 1.2);
    int width = (int) (rightInnerEyeCorner.x - leftInnerEyeCorner.x);
    int y = (int) (rightInnerEyebrowsCorner.y - height / 2);
    int x = (int) leftInnerEyebrowsCorner.x;

    Rect wrinklesRect = new Rect(x, y, width, height);
    Mat wrinklesArea = new Mat(_face, wrinklesRect).clone();

    wrinklesThreshold = (int) (wrinklesArea.width() * wrinklesArea.height() * 0.085);
    //Wrinkles between eyebrows are vertical
    int[] gradientMask = {
            -1, 0, 1,
            -5, 0, 5,
            -1, 0, 1
    };

    wrinklesArea.convertTo(wrinklesArea, CvType.CV_32F);
    Imgproc.cvtColor(wrinklesArea, wrinklesArea, Imgproc.COLOR_BGR2GRAY);
    Core.pow(wrinklesArea, 1.09, wrinklesArea);
    imwrite("wrinklesArea.jpg", wrinklesArea);

    wrinklesArea = StaticFunctions.convolution(gradientMask, wrinklesArea);
    threshold(wrinklesArea, wrinklesArea, 110, 255, Imgproc.THRESH_BINARY);
    imwrite("wrinklesAreaGradiented.jpg", wrinklesArea);

    long wrinklesPoints = 0;
    for (int i = 0; i < wrinklesArea.width(); i++) {
        for (int j = 0; j < wrinklesArea.height(); j++) {
            if (wrinklesArea.get(j, i)[0] == 255) {
                wrinklesPoints++;
            }
        }
    }
    EyeRegion.wrinklesFactor = wrinklesPoints;
    //        System.out.println("Wrinkles factor: "+wrinklesPoints);
    if (wrinklesPoints >= wrinklesThreshold) {
        //            System.out.println("Expression wrinkles detected! Threshold exceeded");
        Imgproc.rectangle(EyeRegion._face, wrinklesRect.br(), wrinklesRect.tl(), new Scalar(0, 50, 205));
    }
}
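
The nested loops above visit every pixel through get(j, i) only to count the white ones. Since wrinklesArea is single-channel and holds only 0 or 255 after THRESH_BINARY, the same count is available in one call; a sketch, not part of the original source:

    long wrinklesPoints = Core.countNonZero(wrinklesArea);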

From source file:es.ugr.osgiliart.features.opencv.Histogram.java

License:Open Source License

@Override
public double[] extract(Mat image) {
    Mat hsvImage = new Mat(image.height(), image.width(), image.type());
    Mat histHue = new Mat();
    Mat histSaturation = new Mat();

    Imgproc.cvtColor(image, hsvImage, Imgproc.COLOR_BGR2HSV);
    List<Mat> channels = new ArrayList<Mat>();
    Core.split(hsvImage, channels);

    //Histogram for hue
    Imgproc.calcHist(Arrays.asList(new Mat[] { channels.get(0) }), new MatOfInt(0), new Mat(), histHue,
            new MatOfInt(BINS), new MatOfFloat(MIN_VALUE, MAX_VALUE));

    //Histogram for saturation
    Imgproc.calcHist(Arrays.asList(new Mat[] { channels.get(1) }), new MatOfInt(0), new Mat(), histSaturation,
            new MatOfInt(BINS), new MatOfFloat(MIN_VALUE, MAX_VALUE));

    double sum = Core.sumElems(histHue).val[0];
    double[] values = new double[histHue.height() + histSaturation.height()];
    int k = 0;
    for (int i = 0; i < histHue.height(); ++i) {
        values[k++] = histHue.get(i, 0)[0] / sum;
    }
    sum = Core.sumElems(histSaturation).val[0];
    for (int i = 0; i < histSaturation.height(); ++i) {
        values[k++] = histSaturation.get(i, 0)[0] / sum;
    }
    return values;
}
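
The two loops above normalize each histogram by hand, dividing every bin by the sum of all bins (an L1 normalization). An equivalent sketch, not from the source, that lets OpenCV normalize and then bulk-reads the CV_32F bins:

    Core.normalize(histHue, histHue, 1.0, 0.0, Core.NORM_L1);   // bins now sum to 1
    float[] hueBins = new float[(int) histHue.total()];
    histHue.get(0, 0, hueBins);   // hueBins[i] matches histHue.get(i, 0)[0]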

From source file:es.ugr.osgiliart.features.opencv.MatchImage.java

License:Open Source License

public double match(String path) {
    Mat img = Highgui.imread(path);
    Mat resizedImg = new Mat(SIZE, SIZE, img.type());
    //Mat blurredImg = new Mat();
    Imgproc.resize(img, resizedImg, new Size(SIZE, SIZE));
    //Imgproc.blur(resizedImg, blurredImg, new Size(FILTER_SIZE,FILTER_SIZE) );

    ArrayList<Mat> channels = new ArrayList<Mat>();

    Core.split(resizedImg, channels);

    int conta = 0;

    double corrcoef = 0;
    for (int i = 0; i < 1; ++i) {
        /*      
              for(int px = 0; px < SIZE; px++){
                 for(int py = 0; py < SIZE; py++){
                    if(resizedImg.get(px, py)[i]!=0.0){
          double im_orig = templateChannels.get(i).get(px, py)[0];
          double im_indi = resizedImg.get(px, py)[i];
                  
          corrcoef +=  Math.pow(im_orig ,2) - Math.pow(im_indi, 2);
          conta++;
                    }
                            
                            
                 }
              }*/

        Mat result = new Mat();
        Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_CCOEFF_NORMED);
        //Imgproc.matchTemplate(channels.get(i), templateChannels.get(i), result, Imgproc.TM_SQDIFF);
        corrcoef += result.get(0, 0)[0];
        //corrcoef += result.get(0, 0)[0];
    }
    // Note: the loop above compares only channel 0 (its bound is 1), yet the sum is averaged over 3 channels here.
    corrcoef /= 3.0;
    //return (corrcoef/conta/(255*3));
    return (corrcoef);
}

From source file:es.ugr.osgiliart.features.opencv.MatchImageNoBackground.java

License:Open Source License

public double match(String path) {

    Mat img = Highgui.imread(path);
    Mat imgResized = new Mat(SIZE, SIZE, img.type());
    Imgproc.resize(img, imgResized, new Size(SIZE, SIZE));

    double distance = 0;

    for (int i = 0; i < SIZE; i++) {
        for (int j = 0; j < SIZE; j++) {
            if ((imgResized.get(i, j)[0] == 255) && (imgResized.get(i, j)[1] == 255)
                    && (imgResized.get(i, j)[2] == 255)) {

                distance += PENALTY;
            } else {
                distance += Math.pow(imgResized.get(i, j)[0] - templateResized.get(i, j)[0], 2)
                        + Math.pow(imgResized.get(i, j)[1] - templateResized.get(i, j)[1], 2)
                        + Math.pow(imgResized.get(i, j)[2] - templateResized.get(i, j)[2], 2);
            }
        }
    }

    return -distance;
}
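
In the loops above each pixel is fetched with up to nine separate get(i, j) calls, and every call allocates a new double[]. The inner body can reuse a single read per image; a sketch of the same computation, written with the identifiers of the method above:

    double[] p = imgResized.get(i, j);
    double[] t = templateResized.get(i, j);
    if (p[0] == 255 && p[1] == 255 && p[2] == 255) {
        distance += PENALTY;
    } else {
        for (int c = 0; c < 3; c++) {
            distance += Math.pow(p[c] - t[c], 2);
        }
    }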

From source file:fi.conf.tabare.ARDataProvider.java

private void detect() {

    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;

    //List<Mat> channels = new LinkedList<>();

    //Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);

                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);

                //Imgproc.equalizeHist(input_image, input_image);

                input_image.convertTo(input_image, -1, params.contrast, params.brightness); //image*contrast[1.0-3.0] + brightness[0-255]

                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {

                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);

                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();

                }

                //               if(background == null) background = input_image.clone();         
                //               if(recaptureBg){
                //                  backgSubstractor.apply(background, background);
                //                  System.out.println(background.channels() + " " + background.size() );
                //                  System.out.println(input_image.channels() + " " + input_image.size() );
                //                  recaptureBg = false;
                //               }
                //               if(dynamicBGRemoval){
                //                  //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //                  //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //                  //Core.subtract(input_image, background, input_image);
                //                  //Core.bitwise_xor(input_image, background, input_image);
                //
                //                  doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get nicer result when substracting
                //                  backgSubstractor.apply(background, background, dynamicBGAmount);
                //               }
                //               if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;

                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;

                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {

                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV
                                            : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {

                        double[] coords = circles.get(0, i); // {center x, center y, radius}

                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; //If the circle is off the limits, or too small, don't process it.

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);

                        if (tc.isValid())
                            tripcodeData.add(tc);

                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();

                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }

            } else {
                System.out.println("frame/cam failiure!");
            }

        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        //FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }

    }

    //De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}

From source file:fuzzycv.MainFrame.java

private Mat findAndDrawCrust(Mat maskedImage, Mat frame) {

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();

    Imgproc.findContours(maskedImage, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    //if any contour exist...
    if (hierarchy.size().height > 0 && hierarchy.size().width > 0) {
        //for each contour, display it in blue
        for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) {
            Imgproc.drawContours(frame, contours, idx, new Scalar(160, 0, 0));
        }
    }

    return frame;
}
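
For reference, the hierarchy Mat filled by findContours stores four indices per contour, so hierarchy.get(0, idx) in the loop above returns {next, previous, firstChild, parent}; the traversal ends when the next index is -1. A sketch of one such read (idx stands for any contour index):

    double[] entry = hierarchy.get(0, idx);   // {next, previous, firstChild, parent}
    int next = (int) entry[0];                // -1 when no further contour follows at this level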

From source file:fuzzycv.MainFrame.java

/**
 * Get the average value of the histogram representing the image Hue
 * component
 *
 * @param hsvImg
 *            the current frame in HSV
 * @param hueValues
 *            the Hue component of the current frame
 * @return the average value
 */
private double getHistoAvg(Mat hsvImg, Mat hueValues) {
    double average = 0.0;
    Mat hist_hue = new Mat();
    MatOfInt histSize = new MatOfInt(180);
    List<Mat> hue = new ArrayList<>();
    hue.add(hueValues);

    //compute the histogram
    Imgproc.calcHist(hue, new MatOfInt(0), new Mat(), hist_hue, histSize, new MatOfFloat(0, 179), true);
    // get the average for each bin
    for (int h = 0; h < 180; h++) {
        average += (hist_hue.get(h, 0)[0] * h);
    }
    // divide the weighted bin sum by the total number of pixels in the frame
    return average / hsvImg.size().height / hsvImg.size().width;
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

public ArrayList<Line> findLines(int threshold, double minLineLength, double maxLineGap) {
    ArrayList<Line> result = new ArrayList<Line>();

    Mat lineMat = new Mat();
    Imgproc.HoughLinesP(getCurrentMat(), lineMat, 1, PConstants.PI / 180.0, threshold, minLineLength,
            maxLineGap);
    for (int i = 0; i < lineMat.width(); i++) {
        double[] coords = lineMat.get(0, i); // {x1, y1, x2, y2} of one detected segment
        result.add(new Line(coords[0], coords[1], coords[2], coords[3]));
    }

    return result;
}