Example usage for org.opencv.core MatOfPoint2f MatOfPoint2f


Introduction

On this page you can find example usage of the org.opencv.core.MatOfPoint2f constructor MatOfPoint2f(Point... a).

Prototype

public MatOfPoint2f(Point... a) 
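
Below is a minimal, self-contained sketch (not taken from the usages listed further down) showing the two ways this constructor typically appears: building a MatOfPoint2f directly from Point objects, and converting a MatOfPoint contour so it can be passed to functions such as Imgproc.minAreaRect or Imgproc.approxPolyDP. The class name and the sample points are illustrative only.

import org.opencv.core.Core;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

public class MatOfPoint2fExample {
    static {
        // assumes the OpenCV native library is available on java.library.path
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    }

    public static void main(String[] args) {
        // build a MatOfPoint2f directly from Point varargs
        MatOfPoint2f quad = new MatOfPoint2f(
                new Point(0, 0), new Point(100, 0),
                new Point(100, 50), new Point(0, 50));

        // convert an integer-precision contour (MatOfPoint) to MatOfPoint2f,
        // as required by minAreaRect and approxPolyDP
        MatOfPoint contour = new MatOfPoint(
                new Point(10, 10), new Point(90, 12),
                new Point(88, 40), new Point(12, 38));
        MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray());

        System.out.println("points in quad: " + quad.rows());
        System.out.println("minAreaRect: " + Imgproc.minAreaRect(contour2f));
    }
}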

Usage

From source file:LicenseDetection.java

public void run() {

    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------  Save image first ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ***************************

    // ------------------ Binary threshold for OCR (used later) ------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation------------------
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern); // MORPH_DILATE grows the white background, thinning the dark text, so it acts like erosion on the characters
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    //------------------ Dilation operation  ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);

    // *************************************************************************************************************

    //        // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    //        Mat k = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    //        Mat imgCloseOCR = new Mat();
    //        Imgproc.morphologyEx(imgThresholdOCR,imgCloseOCR,1,k);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgCloseOCR.png", imgCloseOCR);

    // ------------------ Sobel vertical edge detection ------------------
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    //        // ------------------ Open operation (erosion followed by dilation) ------------------
    //        Mat ker = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 2));
    //        Mat imgOpen = new Mat();
    //        Imgproc.morphologyEx(imgThreshold,imgOpen,0,ker);
    //        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgOpen.png", imgOpen);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();

    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************

    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);

    // ******************************************************************

    // --------------  Convert contours --------------------

    //Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();

    for (MatOfPoint mat : contours) {

        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);

    }

    //Get minAreaRects
    List<RotatedRect> minAreaRects = new ArrayList<>();

    for (MatOfPoint2f mat : newContours) {

        RotatedRect rect = Imgproc.minAreaRect(mat);

        /*
         --------------- BUG WORKAROUND ------------

        Possible bug:
        When converting from MatOfPoint2f to RotatedRect, the width and height were swapped and the
        angle was -90 degrees from what it would be if the width and height were correct.

        When painting the rectangles onto the image, the correct boxes were produced, but performing
        calculations on rect.angle, rect.width, or rect.height yielded unwanted results.

        The following workaround is rough but works for this purpose.
         */

        if (rect.size.width < rect.size.height) {
            double temp;

            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;

        }

        //check aspect ratio and area and angle
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000 && rect.size.width * rect.size.height < 50000
                && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }

        //minAreaRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    /*
    The following code is used to draw the rectangles on top of the original image for debugging purposes
     */
    //Draw Rotated Rects
    Point[] vertices = new Point[4];

    Mat imageWithBoxes = img;

    // Draw color rectangles on top of binary contours
    //        Mat imageWithBoxes = new Mat();
    //        Mat temp = imgDilateOCR;
    //        Imgproc.cvtColor(temp, imageWithBoxes, Imgproc.COLOR_GRAY2RGB);

    for (RotatedRect rect : minAreaRects) {

        rect.points(vertices);

        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }

    }

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);

    // ******************************************************************

    // **************************** DEBUG CODE **************************
    //        for(RotatedRect rect : minAreaRects) {
    //            System.out.println(rect.toString());
    //        }
    // ******************************************************************

    /*
    In order to rotate the image without cropping it:

    1. Create a new square image whose side length equals the diagonal of the initial image.
    2. Draw the initial image into the center of the new image by inserting it at an ROI (Region of Interest).
    3. Rotate the new image.

    (A sketch of the rotate(...) helper used in step 3 appears after this listing.)
     */

    //Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    //New Mat with hypotenuse as height and width
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0);

    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; //x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; //y start of ROI

    //designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    //Insert image into region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); //Mat to hold temporarily rotated mat
    Mat rectMat = new Mat();//Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4];//Used to build rect to make ROI
    Rect rec = new Rect();

    List<RotatedRect> edgeDensityRects = new ArrayList<>(); //populate new arraylist with rects that satisfy edge density

    int count = 0;

    //Loop through Rotated Rects and find edge density
    for (RotatedRect rect : minAreaRects) {

        count++;

        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        //rotate image to match the orientation of the rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        //remove rect rotation
        rect.angle = 0;

        //get vertices from rotatedRect
        rect.points(rectVertices);

        // **************************** DEBUG CODE **************************
        //
        //            for (int k = 0; k < 4; k++) {
        //                System.out.println(rectVertices[k]);
        //                Imgproc.line(rotatedTemp, rectVertices[k], rectVertices[(k + 1) % 4], new Scalar(0, 0, 255), 2);
        //            }
        //
        //            Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotated" + count + ".png", rotatedTemp);

        // *****************************************************************

        //build rect to use as ROI
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        //find edge density

        //            // ------------------------ edge density check NOT IMPLEMENTED --------------------
        //            /*
        //            Checking for edge density was not necessary for this image so it was not implemented due to lack of time
        //             */
        //            for(int i = 0; i < rectMat.rows(); ++i){
        //                for(int j = 0; j < rectMat.cols(); ++j){
        //
        //                  //add up white pixels
        //                }
        //            }
        //
        //            //check number of white pixels against total pixels
        //            //only add rects to new arraylist that satisfy threshold

        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************

    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);
    //Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpaceROTATED.png", rotatedTemp);

    //System.out.println(imgGray.type());

    // *****************************************************************

    // if there is only one rectangle left, it's the license plate
    if (edgeDensityRects.size() == 1) {

        String result = ""; //Hold result from OCR
        BufferedImage bimg;
        Mat cropped;

        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);

        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        result = result.replace("\n", "");

        System.out.println(result);

        CarProfDBImpl db = new CarProfDBImpl();

        db.connect("localhost:3306/computer_vision", "root", "*******");

        CarProf c = db.getCarProf(result);

        System.out.print(c.toString());

        db.close();

    }

}
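
The rotate(...) and matToBufferedImage(...) helpers called in run() are not part of this excerpt. A minimal sketch of what a rotate helper with that signature might look like, using Imgproc.getRotationMatrix2D and Imgproc.warpAffine (org.opencv.core.Mat, org.opencv.core.Point, org.opencv.imgproc.Imgproc), is shown below; this is an assumption for illustration, not the original author's implementation.

// Hypothetical helper: rotates 'src' about 'center' by 'angle' degrees and writes the result to 'dst'.
private static void rotate(Mat src, Mat dst, Point center, double angle) {
    Mat rotationMatrix = Imgproc.getRotationMatrix2D(center, angle, 1.0); // 2x3 affine transform
    Imgproc.warpAffine(src, dst, rotationMatrix, src.size());
}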

From source file:ac.robinson.ticqr.TickBoxImageParserTask.java

License:Apache License

@Override
protected ArrayList<PointF> doInBackground(Void... unused) {
    Log.d(TAG, "Searching for tick boxes of " + mBoxSize + " size");

    // we look for *un-ticked* boxes, rather than ticked, as they are uniform in appearance (and hence easier to
    // detect) - they show up as a box within a box
    ArrayList<PointF> centrePoints = new ArrayList<>();
    int minimumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize, 2));
    int maximumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize * 1.35f, 2));
    int minimumInnerBoxArea = (int) Math.round(Math.pow(mBoxSize * 0.5f, 2));

    // image adjustment - blurSize, blurSTDev and adaptiveThresholdSize must not be even numbers
    int blurSize = 9;
    int blurSTDev = 3;
    int adaptiveThresholdSize = Math.round(mBoxSize * 3); // (oddness ensured below)
    int adaptiveThresholdC = 4; // value to add to the mean (can be negative or zero)
    adaptiveThresholdSize = adaptiveThresholdSize % 2 == 0 ? adaptiveThresholdSize + 1 : adaptiveThresholdSize;

    // how similar the recognised polygon must be to its actual contour - lower is more similar
    float outerPolygonSimilarity = 0.045f;
    float innerPolygonSimilarity = 0.075f; // don't require as much accuracy for the inner part of the tick box

    // how large the maximum internal angle can be (e.g., for checking square shape)
    float maxOuterAngleCos = 0.3f;
    float maxInnerAngleCos = 0.4f;

    // use OpenCV to recognise boxes that have a box inside them - i.e. an un-ticked tick box
    // see: http://stackoverflow.com/a/11427501
    // Bitmap newBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true); // not needed
    Mat bitMat = new Mat();
    Utils.bitmapToMat(mBitmap, bitMat);

    // blur and convert to grey
    // alternative (less flexible): Imgproc.medianBlur(bitMat, bitMat, blurSize);
    Imgproc.GaussianBlur(bitMat, bitMat, new Size(blurSize, blurSize), blurSTDev, blurSTDev);
    Imgproc.cvtColor(bitMat, bitMat, Imgproc.COLOR_RGB2GRAY); // need 8uC1 (1 channel, unsigned char) image type

    // perform adaptive thresholding to detect edges
    // alternative (slower): Imgproc.Canny(bitMat, bitMat, 10, 20, 3, false);
    Imgproc.adaptiveThreshold(bitMat, bitMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            adaptiveThresholdSize, adaptiveThresholdC);

    // get the contours in the image, and their hierarchy
    Mat hierarchyMat = new Mat();
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(bitMat, contours, hierarchyMat, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    if (DEBUG) {
        Imgproc.drawContours(bitMat, contours, -1, new Scalar(30, 255, 255), 1);
    }

    // parse the contours and look for a box containing another box, with similar enough sizes
    int numContours = contours.size();
    ArrayList<Integer> searchedContours = new ArrayList<>();
    Log.d(TAG, "Found " + numContours + " possible tick box areas");
    if (numContours > 0 && !hierarchyMat.empty()) {
        for (int i = 0; i < numContours; i++) {

            // the original detected contour
            MatOfPoint boxPoints = contours.get(i);

            // hierarchy key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
            int childBox = (int) hierarchyMat.get(0, i)[2]; // usually the largest child (as we're doing RETR_TREE)
            if (childBox == -1) { // we only want elements that have children
                continue;
            } else {
                if (searchedContours.contains(childBox)) {
                    if (DEBUG) {
                        Log.d(TAG, "Ignoring duplicate box at first stage: " + childBox);
                    }
                    continue;
                } else {
                    searchedContours.add(childBox);
                }
            }

            // discard smaller (i.e. noise) outer box areas as soon as possible for speed
            // used to do Imgproc.isContourConvex(outerPoints) later, but the angle check covers this, so no need
            double originalArea = Math.abs(Imgproc.contourArea(boxPoints));
            if (originalArea < minimumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too small");
                // }
                continue;
            }
            if (originalArea > maximumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too big");
                // }
                continue;
            }

            // simplify the contours of the outer box - we want to detect four-sided shapes only
            MatOfPoint2f boxPoints2f = new MatOfPoint2f(boxPoints.toArray()); // Point2f for approxPolyDP
            Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                    outerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true); // simplify the contour
            if (boxPoints2f.height() != 4) { // height is number of points
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer box not 4 points");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, angle-wise
            org.opencv.core.Point[] boxPointsArray = boxPoints2f.toArray();
            double maxCosine = 0;
            for (int j = 0; j < 4; j++) {
                org.opencv.core.Point pL = boxPointsArray[j];
                org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
            }
            if (maxCosine > maxOuterAngleCos) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer angles not square enough");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, line length-wise
            double minLine = Double.MAX_VALUE;
            double maxLine = 0;
            for (int p = 1; p < 4; p++) {
                org.opencv.core.Point p1 = boxPointsArray[p - 1];
                org.opencv.core.Point p2 = boxPointsArray[p];
                double xd = p1.x - p2.x;
                double yd = p1.y - p2.y;
                double lineLength = Math.sqrt((xd * xd) + (yd * yd));
                minLine = Math.min(minLine, lineLength);
                maxLine = Math.max(maxLine, lineLength);
            }
            if (maxLine - minLine > minLine) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer lines not square enough");
                }
                continue;
            }

            // draw the outer box if debugging
            if (DEBUG) {
                MatOfPoint debugBoxPoints = new MatOfPoint(boxPointsArray);
                Log.d(TAG,
                        "Potential tick box: " + boxPoints2f.size() + ", " + "area: "
                                + Math.abs(Imgproc.contourArea(debugBoxPoints)) + " (min:" + minimumOuterBoxArea
                                + ", max:" + maximumOuterBoxArea + ")");
                drawPoints(bitMat, debugBoxPoints, new Scalar(50, 255, 255), 2);
            }

            // loop through the children - they should be in descending size order, but sometimes this is wrong
            boolean wrongBox = false;
            while (true) {
                if (DEBUG) {
                    Log.d(TAG, "Looping with box: " + childBox);
                }

                // we've previously tried a child - try the next one
                // key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
                if (wrongBox) {
                    childBox = (int) hierarchyMat.get(0, childBox)[0];
                    if (childBox == -1) {
                        break;
                    }
                    if (searchedContours.contains(childBox)) {
                        if (DEBUG) {
                            Log.d(TAG, "Ignoring duplicate box at loop stage: " + childBox);
                        }
                        break;
                    } else {
                        searchedContours.add(childBox);
                    }
                    //noinspection UnusedAssignment
                    wrongBox = false;
                }

                // perhaps this is the outer box - check its child has no children itself
                // (removed so tiny children (i.e. noise) don't mean we mis-detect an un-ticked box as ticked)
                // if (hierarchyMat.get(0, childBox)[2] != -1) {
                // continue;
                // }

                // check the size of the child box is large enough
                boxPoints = contours.get(childBox);
                originalArea = Math.abs(Imgproc.contourArea(boxPoints));
                if (originalArea < minimumInnerBoxArea) {
                    if (DEBUG) {
                        // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                        Log.d(TAG, "Inner box too small");
                    }
                    wrongBox = true;
                    continue;
                }

                // simplify the contours of the inner box - again, we want four-sided shapes only
                boxPoints2f = new MatOfPoint2f(boxPoints.toArray());
                Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                        innerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true);
                if (boxPoints2f.height() != 4) { // height is number of points
                    // if (DEBUG) {
                    // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                    // }
                    Log.d(TAG, "Inner box fewer than 4 points"); // TODO: allow > 4 for low quality images?
                    wrongBox = true;
                    continue;
                }

                // check that the simplified inner box is approximately a square, angle-wise
                // higher tolerance because noise means if we get several inners, the box may not be quite square
                boxPointsArray = boxPoints2f.toArray();
                maxCosine = 0;
                for (int j = 0; j < 4; j++) {
                    org.opencv.core.Point pL = boxPointsArray[j];
                    org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                    org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                    maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
                }
                if (maxCosine > maxInnerAngleCos) {
                    Log.d(TAG, "Inner angles not square enough");
                    wrongBox = true;
                    continue;
                }

                // this is probably an inner box - log if debugging
                if (DEBUG) {
                    Log.d(TAG,
                            "Un-ticked inner box: " + boxPoints2f.size() + ", " + "area: "
                                    + Math.abs(Imgproc.contourArea(new MatOfPoint2f(boxPointsArray)))
                                    + " (min: " + minimumInnerBoxArea + ")");
                }

                // find the inner box centre
                double centreX = (boxPointsArray[0].x + boxPointsArray[1].x + boxPointsArray[2].x
                        + boxPointsArray[3].x) / 4f;
                double centreY = (boxPointsArray[0].y + boxPointsArray[1].y + boxPointsArray[2].y
                        + boxPointsArray[3].y) / 4f;

                // draw the inner box if debugging
                if (DEBUG) {
                    drawPoints(bitMat, new MatOfPoint(boxPointsArray), new Scalar(255, 255, 255), 1);
                    Core.circle(bitMat, new org.opencv.core.Point(centreX, centreY), 3,
                            new Scalar(255, 255, 255));
                }

                // add to the list of boxes to check
                centrePoints.add(new PointF((float) centreX, (float) centreY));
                break;
            }
        }
    }

    Log.d(TAG, "Found " + centrePoints.size() + " un-ticked boxes");
    return centrePoints;
}

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Calculates an average intensity of pixels on image within specified contour.
 *
 * @param src - source image used for calculating an average intensity.
 * @param contour - a contour which is presented as some region of interest for operation.
 * @return a level of average intensity.
 */
public static int averageIntensity(Mat src, MatOfPoint contour) {
    int averageIntensityWithinContour = 0;
    int quantityOfPixelsWithinContour = 0;
    Rect boundingRectangle = boundingRect(contour);

    for (int xCoord = (int) boundingRectangle.tl().x; xCoord <= (int) boundingRectangle.br().x; xCoord++) {
        for (int yCoord = (int) boundingRectangle.tl().y; yCoord <= (int) boundingRectangle.br().y; yCoord++) {
            if (pointPolygonTest(new MatOfPoint2f(contour.toArray()), new Point(xCoord, yCoord), false) > 0) {
                averageIntensityWithinContour += intensity(src, xCoord, yCoord);
                quantityOfPixelsWithinContour++;
            }
        }
    }

    if (quantityOfPixelsWithinContour == 0) {
        quantityOfPixelsWithinContour = 1;
        averageIntensityWithinContour = intensity(src, boundingRectangle.x, boundingRectangle.y);
        if (src.channels() == 1) {
            averageIntensityWithinContour = averageIntensityWithinContour > 127 ? 0 : 255;
        }
    }

    return averageIntensityWithinContour / quantityOfPixelsWithinContour;
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void detectAstrocytesOld(Mat source, Integer averageRectSize, Double averageArea, int intensity) {
    if (source.channels() == 3) {
        source = CoreOperations.grayscale(source);
    }

    astrocytesCenters = new ArrayList<>();
    List<MatOfPoint> contoursAfterFirstIteration = new ArrayList<>();
    Mat hierarchy = new Mat();

    /* Step 1 */
    findContours(source, contoursAfterFirstIteration, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contoursAfterFirstIteration) {
        Rect boundingRectangle = boundingRect(contour);
        Double contourArea = contourArea(contour);
        Double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        /* Step 2 */
        if (averageArea - 160 <= contourArea /*&& contourArea <= averageArea + 10*/) {
            /* Step 3 */
            if (((averageRectSize - 15 <= boundingRectangle.width)
                    && (boundingRectangle.width <= averageRectSize + 15)
                    || (averageRectSize - 15 <= boundingRectangle.height)
                            && (boundingRectangle.height <= averageRectSize + 15))
                    && (boundingRectangle.width / (float) boundingRectangle.height < 1.8f)
                    && (boundingRectangle.height / (float) boundingRectangle.width < 1.8f)) {
                /* Step 4 */
                if (contourArea / (contourPerimeter * contourPerimeter) > 0.05
                        && contourArea / (contourPerimeter * contourPerimeter) < 0.30) {
                    int averageIntensityWithinContour = CoreOperations.averageIntensity(sourceImage, contour);

                    /* Step 5 */
                    if (averageIntensityWithinContour <= intensity + 20) {
                        int xCoordOfAstrocyteCenter = (int) boundingRectangle.tl().x
                                + boundingRectangle.width / 2;
                        int yCoordOfAstrocyteCenter = (int) boundingRectangle.tl().y
                                + boundingRectangle.height / 2;
                        astrocytesCenters.add(new Point(xCoordOfAstrocyteCenter, yCoordOfAstrocyteCenter));
                    }
                }
            }
        }
    }
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void findAstrocytes(Mat src) {
    astrocytesCenters = new ArrayList<Point>();

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    findContours(CoreOperations.grayscale(src), contours, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contours) {
        Rect boundingRectangle = boundingRect(contour);
        Double contourArea = contourArea(contour);
        Double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        if (contourArea / (contourPerimeter * contourPerimeter) > 0.05
                && contourArea / (contourPerimeter * contourPerimeter) < 0.30) {
            int xCenter = boundingRectangle.x + boundingRectangle.width / 2;
            int yCenter = boundingRectangle.y + boundingRectangle.height / 2;
            astrocytesCenters.add(new Point(xCenter, yCenter));
        }
    }
}

From source file:com.mycompany.objectdetection.ObjectDetector.java

public void findObjects() {

    //        Imgproc.cvtColor(img, imgGrayscale, Imgproc.COLOR_RGBA2GRAY, 1); 
    //        Core.convertScaleAbs(img, imgGrayscale);
    //        Core.normalize(imgGrayscale, imgMeanShifted, 0.0, 1.0, NORM_MINMAX);
    preProcessImg();

    toGrayScale(imgMeanShifted);
    detectEdges(imgGrayscale);
    Imgproc.findContours(imgCanny, contours, imgCanny, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
    objList = new ArrayList();

    for (MatOfPoint mop : contours) {
        MatOfPoint2f m2p;
        m2p = new MatOfPoint2f(mop.toArray());
        Double peri = Imgproc.arcLength(m2p, true);
        Imgproc.approxPolyDP(m2p, m2p, 0.02 * peri, true);
        Imgproc.drawContours(imgOut, contours, -1, new Scalar(0, 0, 255), 2);

        float area = img.width() * img.height();
        Rect rect = Imgproc.boundingRect(mop);
        objList.add(rect);
        Imgproc.rectangle(imgOut, rect.tl(), rect.br(), new Scalar(255, 0, 0));
    }

    Collections.sort(objList, new Comparator<Rect>() {
        @Override
        public int compare(Rect r1, Rect r2) {
            return (int) (r2.area() - r1.area());
        }

    });

    List<Rect> arr = objList;

    while (arr.size() > 0) {
        //System.out.println("---->" + arr);
        Rect bigRect = arr.get(0);
        arr.remove(0);
        Rect bigRect2 = new Rect();

        while (!equals(bigRect, bigRect2)) {
            bigRect2 = bigRect;
            for (int i = 0; i < arr.size(); ++i) {
                // System.out.println("elotte"+arr.get(i));
                if (doOverlap(bigRect, arr.get(i))) {
                    //System.out.println("utana"+arr.get(i));
                    bigRect = union(bigRect, arr.get(i));
                    arr.remove(i);
                    break;
                }
            }

        }

        mainRect = bigRect;

        if (objList.size() > 5 && mainRect.area() >= img.width() * img.height() * 3 / 100) {
            Imgproc.rectangle(imgOut, bigRect.tl(), bigRect.br(), new Scalar(255, 255, 0));
            mainObjects.add(mainRect);
        } else if (objList.size() <= 5) {
            mainObjects.add(mainRect);
        }
    }

}

From source file:detectiontest.Particle.java

public static Rect calcBoundingBox(MatOfPoint contour) {
    MatOfPoint2f curve = new MatOfPoint2f(contour.toArray());
    MatOfPoint2f curveApprox = new MatOfPoint2f();
    Imgproc.approxPolyDP(curve, curveApprox, 3, true);
    return Imgproc.boundingRect(new MatOfPoint(curveApprox.toArray()));
}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

public int[] polySearch(Point point, Mat hierarchy, List<MatOfPoint> contours, int current) {
    // first index is whether the point is contained, second is which child contains it,
    // and third is the next sibling
    int[] out = new int[3];
    int[] hEntry = new int[4];
    hierarchy.get(0, current, hEntry);
    int nextChild = hEntry[2], nextSibling = hEntry[0];
    // If point is not within current contour return -1
    if (Imgproc.pointPolygonTest(new MatOfPoint2f(contours.get(current).toArray()), point, false) < 0) {
        out[0] = -1;
        out[1] = -1;
    } else {
        //Otherwise check if contours has children containing the point
        out[0] = 1;
        //Depth first search
        int[] ret = new int[3];
        int childrenCount = 0;
        while (nextChild != -1) {
            ret = polySearch(point, hierarchy, contours, nextChild);
            childrenCount++;
            nextChild = ret[2];
        }
        //If there is only one child, return it.
        if (childrenCount == 1) {
            out[1] = ret[1];
        } else { //If more than one child is contained, return the parent
            out[1] = current;
        }
    }
    out[2] = nextSibling;

    return out;
}

From source file:eu.fpetersen.robobrain.behavior.followobject.OrbObjectDetector.java

License:Open Source License

public void process(Mat image) {
    Mat tempImage = new Mat();
    Imgproc.cvtColor(image, tempImage, Imgproc.COLOR_RGBA2RGB);
    MatOfKeyPoint keypoints = detectInImage(tempImage);
    Mat descriptors = extractDescriptors(keypoints, tempImage);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors, originalDescriptors, matches);

    KeyPoint[] keypointArray = keypoints.toArray();
    KeyPoint[] originalKeypointArray = originalKeypoints.toArray();

    float min = 40.0f;
    float max = 1000.0f;
    for (DMatch match : matches.toList()) {
        if (match.distance < min) {
            min = match.distance;
        } else if (match.distance > max) {
            max = match.distance;
        }
    }

    float threshold = 1.5f * min;
    List<KeyPoint> matchedKeyPoints = new ArrayList<KeyPoint>();
    List<Point> matchedPoints = new ArrayList<Point>();
    List<Point> matchedOriginalPoints = new ArrayList<Point>();
    for (DMatch match : matches.toList()) {
        if (match.distance < threshold) {
            KeyPoint matchedKeypoint = keypointArray[match.queryIdx];
            matchedKeyPoints.add(matchedKeypoint);
            matchedPoints.add(matchedKeypoint.pt);

            KeyPoint matchedOriginalKeypoint = originalKeypointArray[match.trainIdx];
            matchedOriginalPoints.add(matchedOriginalKeypoint.pt);
        }
    }

    if (matchedKeyPoints.size() > 10) {

        Mat H = Calib3d.findHomography(
                new MatOfPoint2f(matchedOriginalPoints.toArray(new Point[matchedOriginalPoints.size()])),
                new MatOfPoint2f(matchedPoints.toArray(new Point[matchedPoints.size()])), Calib3d.RANSAC, 10);

        List<Point> originalCorners = new ArrayList<Point>();
        originalCorners.add(new Point(0, 0));
        originalCorners.add(new Point(originalImage.cols(), 0));
        originalCorners.add(new Point(originalImage.cols(), originalImage.rows()));
        originalCorners.add(new Point(0, originalImage.rows()));

        List<Point> corners = new ArrayList<Point>();
        for (int i = 0; i < 4; i++) {
            corners.add(new Point(0, 0));
        }
        Mat objectCorners = Converters.vector_Point2f_to_Mat(corners);

        Core.perspectiveTransform(Converters.vector_Point2f_to_Mat(originalCorners), objectCorners, H);
        corners.clear();
        Converters.Mat_to_vector_Point2f(objectCorners, corners);

        Core.line(tempImage, corners.get(0), corners.get(1), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(1), corners.get(2), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(2), corners.get(3), new Scalar(0, 255, 0), 4);
        Core.line(tempImage, corners.get(3), corners.get(0), new Scalar(0, 255, 0), 4);
    }

    Features2d.drawKeypoints(tempImage,
            new MatOfKeyPoint(matchedKeyPoints.toArray(new KeyPoint[matchedKeyPoints.size()])), tempImage);
    Imgproc.cvtColor(tempImage, image, Imgproc.COLOR_RGB2RGBA);

}

From source file:karthik.Barcode.MatrixBarcode.java

License:Open Source License

public List<CandidateResult> locateBarcode() throws IOException {

    calcGradientDirectionAndMagnitude();
    for (int tileSize = searchParams.tileSize; tileSize < rows && tileSize < cols; tileSize *= 4) {
        img_details.probabilities = calcProbabilityMatrix(tileSize); // find areas with low variance in gradient direction

        //    connectComponents();
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        // findContours modifies the source image, so we pass it a clone of img_details.probabilities
        // img_details.probabilities will be used again shortly to expand the barcode region
        Imgproc.findContours(img_details.probabilities.clone(), contours, hierarchy, Imgproc.RETR_LIST,
                Imgproc.CHAIN_APPROX_SIMPLE);

        double bounding_rect_area = 0;
        RotatedRect minRect;
        CandidateResult ROI;
        int area_multiplier = (searchParams.RECT_HEIGHT * searchParams.RECT_WIDTH)
                / (searchParams.PROB_MAT_TILE_SIZE * searchParams.PROB_MAT_TILE_SIZE);
        // the picture was downsampled during the probability calculation, so we multiply by area_multiplier to get the area in the original picture

        for (int i = 0; i < contours.size(); i++) {
            double area = Imgproc.contourArea(contours.get(i));

            if (area * area_multiplier < searchParams.THRESHOLD_MIN_AREA) // ignore contour if it is of too small a region
                continue;

            minRect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
            bounding_rect_area = minRect.size.width * minRect.size.height;
            if (DEBUG_IMAGES) {
                System.out.println("Area is " + area * area_multiplier + " MIN_AREA is "
                        + searchParams.THRESHOLD_MIN_AREA);
                System.out.println("area ratio is " + ((area / bounding_rect_area)));
            }

            if ((area / bounding_rect_area) > searchParams.THRESHOLD_AREA_RATIO) // check if contour is of a rectangular object
            {
                CandidateMatrixBarcode cb = new CandidateMatrixBarcode(img_details, minRect, searchParams);
                if (DEBUG_IMAGES)
                    cb.debug_drawCandidateRegion(new Scalar(0, 255, 128), img_details.src_scaled);
                // get candidate regions to be a barcode

                // rotates candidate region to straighten it based on the angle of the enclosing RotatedRect                
                ROI = cb.NormalizeCandidateRegion(Barcode.USE_ROTATED_RECT_ANGLE);
                if (postProcessResizeBarcode)
                    ROI.ROI = scale_candidateBarcode(ROI.ROI);

                candidateBarcodes.add(ROI);

                if (DEBUG_IMAGES)
                    cb.debug_drawCandidateRegion(new Scalar(0, 0, 255), img_details.src_scaled);
            }
        }
    }
    return candidateBarcodes;
}