Example usage for org.opencv.core.Mat Mat()

List of usage examples for org.opencv.core.Mat Mat()

Introduction

On this page you can find example usages of the org.opencv.core.Mat default constructor, Mat().

Prototype

public Mat() 
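
A minimal, self-contained sketch of how the no-argument constructor is typically used: new Mat() creates an empty matrix header with no allocated pixel data, and OpenCV routines such as Imgproc.cvtColor allocate the output on demand. The class name, the synthetic test image, and the explicit library-loading call below are illustrative assumptions, not taken from the examples that follow.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MatDefaultConstructorExample {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is available on this machine.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical 320x240 solid-color test image (8-bit, 3-channel).
        Mat src = new Mat(240, 320, CvType.CV_8UC3, new Scalar(0, 0, 255));

        // new Mat() returns an empty Mat: no rows, no columns, no pixel data yet.
        Mat gray = new Mat();
        System.out.println("before cvtColor: empty = " + gray.empty()); // true

        // Output Mats passed to OpenCV functions are (re)allocated as needed.
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        System.out.println("after cvtColor: " + gray.rows() + "x" + gray.cols()); // 240x320

        // Release native memory once the Mats are no longer needed.
        gray.release();
        src.release();
    }
}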

Usage

From source file: in.fabinpaul.sixthsense.ColorBlobDetectionFragment.java

License: Apache License

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    Mat[] colorLabel = new Mat[4];
    org.opencv.core.Point markersXY[] = new org.opencv.core.Point[4];
    for (int i = 0; i < 4; i++) {

        colorLabel[i] = new Mat();

        if (mIsColorSelected[i]) {
            mDetector[i].process(mRgba);
            List<MatOfPoint> contours = mDetector[i].getContours();
            Log.e(TAG, "Contours count: " + contours.size());
            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
            markersXY[i] = mDetector[i].getXY();
            Log.i(TAG, "Point:X" + markersXY[i].x + " Y:" + markersXY[i].y);
            // Paint a 64x64 swatch of the selected marker color into the corresponding corner of the frame.
            switch (i) {
            case 0:
                colorLabel[i] = mRgba.submat(4, 68, 4, 68);
                break;
            case 1:
                colorLabel[i] = mRgba.submat(4, 68, mRgba.cols() - 68, mRgba.cols() - 4);
                break;
            case 2:
                colorLabel[i] = mRgba.submat(mRgba.rows() - 68, mRgba.rows() - 4, 4, 68);
                break;
            case 3:
                colorLabel[i] = mRgba.submat(mRgba.rows() - 68, mRgba.rows() - 4, mRgba.cols() - 68,
                        mRgba.cols() - 4);
                break;
            }
            colorLabel[i].setTo(mBlobColorRgba[i]);
        }
    }

    return mRgba;
}

From source file: in.fabinpaul.sixthsense.ColorBlobDetectionFragment.java

License: Apache License

@Override
public boolean onTouch(View v, MotionEvent event) {
    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        count++;
        if (count > 3)
            count = 0;

        if (count == 3) {
            colorMarkerSet = true;
            comm.saveButtonVisibility();
        }

        int cols = mRgba.cols();
        int rows = mRgba.rows();

        int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
        int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
        Log.i(TAG, "x coordinates" + event.getX() + "y coordinates" + event.getY());
        Log.i(TAG, "View width" + mOpenCvCameraView.getWidth() + "View Height" + mOpenCvCameraView.getHeight());

        int x = (int) event.getX() - xOffset;
        int y = (int) event.getY() - yOffset;

        if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
            return false;

        // Build a small sampling rectangle around the touch point, clamped to the frame bounds.
        Rect touchedRect = new Rect();

        touchedRect.x = (x > 4) ? x - 4 : 0;
        touchedRect.y = (y > 4) ? y - 4 : 0;

        touchedRect.width = (x + 4 < cols) ? x - 1 - touchedRect.x : cols - touchedRect.x;
        touchedRect.height = (y + 4 < rows) ? y - 1 - touchedRect.y : rows - touchedRect.y;

        Log.i(TAG, "Width" + touchedRect.width + " Height" + touchedRect.height);
        Log.i(TAG, "Column" + cols + " Rows" + rows);
        Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");

        Mat touchedRegionRgba = mRgba.submat(touchedRect);

        Mat touchedRegionHsv = new Mat();
        Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

        // Calculate average color of touched region
        mBlobColorHsv[count] = Core.sumElems(touchedRegionHsv);
        int pointCount = touchedRect.width * touchedRect.height;
        for (int i = 0; i < mBlobColorHsv[count].val.length; i++)
            mBlobColorHsv[count].val[i] /= pointCount;

        mBlobColorRgba[count] = converScalarHsv2Rgba(mBlobColorHsv[count]);

        Log.i(TAG, "Before" + mBlobColorHsv[count].val[0] + " " + mBlobColorHsv[count].val[1] + " "
                + mBlobColorHsv[count].val[2]);
        Log.i(TAG, "After" + mBlobColorRgba[count].val[0] + " " + mBlobColorRgba[count].val[1] + " "
                + mBlobColorRgba[count].val[2]);

        Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba[count].val[0] + ", " + mBlobColorRgba[count].val[1]
                + ", " + mBlobColorRgba[count].val[2] + ", " + mBlobColorRgba[count].val[3] + ")");

        // mDetector[count].setHsvColor(mBlobColorHsv[count]);
        setHSV(count);

        mIsColorSelected[count] = true;

        touchedRegionRgba.release();
        touchedRegionHsv.release();
    }
    return true; // don't need subsequent touch events
}

From source file: in.fabinpaul.sixthsense.ColorBlobDetectionFragment.java

License: Apache License

private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    Mat pointMatRgba = new Mat();
    Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);

    return new Scalar(pointMatRgba.get(0, 0));
}

From source file: in.fabinpaul.sixthsense.ColorBlobDetector.java

License: Apache License

public void process(Mat rgbaImage) {
    Imgproc.pyrDown(rgbaImage, mPyrDownMat);
    Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

    Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);

    Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
    Imgproc.dilate(mMask, mDilatedMask, new Mat());

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // Find max contour area
    double maxArea = 0;
    Iterator<MatOfPoint> each = contours.iterator();
    while (each.hasNext()) {
        MatOfPoint wrapper = each.next();
        double area = Imgproc.contourArea(wrapper);
        if (area > maxArea)
            maxArea = area;
    }

    // Filter contours by area and resize to fit the original image size
    mContours.clear();
    each = contours.iterator();
    while (each.hasNext()) {
        MatOfPoint contour = each.next();
        if (Imgproc.contourArea(contour) > mMinContourArea * maxArea) {
            Core.multiply(contour, new Scalar(4, 4), contour);
            mContours.add(contour);
        }
    }
}

From source file: info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License: Open Source License

public Scalar process(Bitmap bmp) {
    // convert the image to OpenCV format
    Log.d("bifrostcore", "create original image");
    Mat original_alpha = new Mat();
    Assert.assertNotNull(original_alpha);
    Utils.bitmapToMat(bmp, original_alpha);
    // remove alpha
    Mat original = new Mat();
    Imgproc.cvtColor(original_alpha, original, Imgproc.COLOR_RGBA2RGB, 0);
    Log.d("bifrostcore", "image size: " + String.valueOf(original.total()));

    // compute an ROI
    Mat roi = compute_roi(original);

    Log.d("bifrostcore", "smooth image");
    // smooth the image
    Mat smoothed = smooth_image(original);

    Log.d("bifrostcore", "convert to hsv");
    Mat hsv = toHSV(smoothed);

    Log.d("bifrostcore", "extract main region");
    // extract main region using histogram
    Mat main_region = extract_main_region(hsv, roi);

    // threshold to preserve only the most significant regions
    Mat main_region_threshold = threshold_mask(main_region);
    saveImage(main_region_threshold);

    Log.d("bifrostcore", "return mean value");
    // return the mean value
    return Core.mean(original, main_region_threshold);
}

From source file: info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License: Open Source License

private Mat compute_roi(Mat original) {
    Mat roi = new Mat();
    Imgproc.cvtColor(original, roi, Imgproc.COLOR_BGR2GRAY, 0);
    roi.setTo(new Scalar(0, 0, 0));
    int x = original.width();
    int y = original.height();
    int cx = x / 2;
    int cy = y / 2;
    int r = Math.min(cx, cy) * 2 / 3;
    // Filled white circle centered in the image; everything outside remains masked out.
    Core.circle(roi, new Point(cx, cy), r, new Scalar(255, 255, 255), -1, 8, 0);
    return roi;
}

From source file: info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License: Open Source License

private Mat toHSV(Mat img) {
    Mat hsv = new Mat();
    Imgproc.cvtColor(img, hsv, Imgproc.COLOR_BGR2HSV);
    return hsv;
}

From source file: info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License: Open Source License

private Mat extract_main_region(Mat img, Mat roi) {
    Mat hist = new Mat();
    int h_bins = 30;
    int s_bins = 32;
    MatOfInt mHistSize = new MatOfInt(h_bins, s_bins);

    MatOfFloat mRanges = new MatOfFloat(0, 179, 0, 255);
    MatOfInt mChannels = new MatOfInt(0, 1);

    Imgproc.calcHist(Arrays.asList(img), mChannels, roi, hist, mHistSize, mRanges, false);

    Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX, -1, new Mat());

    Mat backproj = new Mat();
    Imgproc.calcBackProject(Arrays.asList(img), mChannels, hist, backproj, mRanges, 1);

    Log.w("bifrostcore",
            "Number of pixels in the biggest region: " + String.valueOf(Core.countNonZero(backproj)));
    return backproj.mul(roi);
}

From source file: interactivespaces.activity.image.vision.opencv.outline.ImageOpenCvVisionOutlineActivity.java

License: Apache License

/**
 * Detect all the edges in the image and make only the edges visible.
 *
 * @param image
 *          the image to be processed
 * @param dst
 *          the destination image
 */
private void edgeify(Mat image, Mat dst) {
    Mat gray = new Mat();
    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);

    Imgproc.medianBlur(gray, gray, BLUR_FILTER_SIZE);

    Mat edges = new Mat();
    Imgproc.Laplacian(gray, edges, CvType.CV_8U, LAPLACIAN_KERNEL_SIZE, 1, 0);

    Imgproc.threshold(edges, dst, EDGES_THRESHOLD, MAXIMUM_THRESHOLD_VALUE, Imgproc.THRESH_BINARY_INV);
}

From source file: interactivespaces.service.image.vision.opencv.OpenCvVideoLoop.java

License: Apache License

@Override
protected void loop() throws InterruptedException {
    Mat frame = new Mat();
    capture.grab();
    capture.retrieve(frame);
    if (frame.empty()) {
        log.warn("No image");
        return;
    }

    notifyListenersNewVideoFrame(frame);
}