Example usage for org.opencv.imgproc Imgproc erode

Introduction

This page collects usage examples for org.opencv.imgproc.Imgproc.erode.

Prototype

public static void erode(Mat src, Mat dst, Mat kernel) 
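
The kernel argument is the structuring element over which the per-pixel minimum is taken; passing an empty Mat makes OpenCV fall back to a default 3x3 rectangular element. Below is a minimal standalone sketch (the file names are placeholders, and it assumes the OpenCV 3.x Java bindings) that erodes a grayscale image with a 5x5 elliptical kernel:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class ErodeExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Load the input as grayscale (path is a placeholder)
        Mat src = Imgcodecs.imread("input.png", Imgcodecs.IMREAD_GRAYSCALE);
        Mat dst = new Mat();

        // 5x5 elliptical structuring element; bright regions shrink accordingly
        Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
        Imgproc.erode(src, dst, kernel);

        Imgcodecs.imwrite("eroded.png", dst);
    }
}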

Usage

From source file:com.mitzuli.core.ocr.OcrPreprocessor.java

License:Open Source License

/**
 * Binarizes and cleans the input image for OCR, saving debugging images in the given directory.
 *
 * @param input the input image, which is recycled by this method, so the caller should make a defensive copy of it if necessary.
 * @param debugDir the directory to write the debugging images to, or null to disable debugging.
 * @return the preprocessed image.
 */
static Image preprocess(final Image input, final File debugDir) {
    // TODO Temporary workaround to allow manually enabling debugging (the global final variable should be used)
    boolean DEBUG = debugDir != null;

    // Initialization
    final Mat mat = input.toGrayscaleMat();
    final Mat debugMat = DEBUG ? input.toRgbMat() : null;
    input.recycle();
    final Mat aux = new Mat(mat.size(), CvType.CV_8UC1);
    final Mat binary = new Mat(mat.size(), CvType.CV_8UC1);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "1_input.jpg"));

    // Binarize the input image in mat through adaptive Gaussian thresholding
    Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 51,
            13);
    // Imgproc.adaptiveThreshold(mat, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 31, 7);

    // Edge detection
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_3X3); // Open
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_3X3); // Close
    Core.addWeighted(mat, 0.5, aux, 0.5, 0, mat); // Average
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_GRADIENT, KERNEL_3X3); // Gradient
    Imgproc.threshold(mat, mat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU); // Edge map
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "2_edges.jpg"));

    // Extract word level connected-components from the dilated edge map
    Imgproc.dilate(mat, mat, KERNEL_3X3);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "3_dilated_edges.jpg"));
    final List<MatOfPoint> wordCCs = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mat, wordCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter word level connected-components individually and calculate their average attributes
    final List<MatOfPoint> individuallyFilteredWordCCs = new ArrayList<MatOfPoint>();
    final List<MatOfPoint> removedWordCCs = new ArrayList<MatOfPoint>();
    double avgWidth = 0, avgHeight = 0, avgArea = 0;
    for (MatOfPoint cc : wordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.height >= 6 // bounding box height >= 6
                && boundingBox.area() >= 50 // bounding box area >= 50
                && (double) boundingBox.width / (double) boundingBox.height >= 0.25 // bounding box aspect ratio >= 1:4
                && boundingBox.width <= 0.75 * mat.width() // bounding box width <= 0.75 image width
                && boundingBox.height <= 0.75 * mat.height()) // bounding box height <= 0.75 image height
        {
            individuallyFilteredWordCCs.add(cc);
            avgWidth += boundingBox.width;
            avgHeight += boundingBox.height;
            avgArea += boundingBox.area();
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    wordCCs.clear();
    avgWidth /= individuallyFilteredWordCCs.size();
    avgHeight /= individuallyFilteredWordCCs.size();
    avgArea /= individuallyFilteredWordCCs.size();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, removedWordCCs, -1, BLUE, -1);
        removedWordCCs.clear();
    }

    // Filter word level connected-components in relation to their average attributes
    final List<MatOfPoint> filteredWordCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint cc : individuallyFilteredWordCCs) {
        final Rect boundingBox = Imgproc.boundingRect(cc);
        if (boundingBox.width >= 0.125 * avgWidth // bounding box width >= 0.125 average width
                && boundingBox.width <= 8 * avgWidth // bounding box width <= 8 average width
                && boundingBox.height >= 0.25 * avgHeight // bounding box height >= 0.25 average height
                && boundingBox.height <= 4 * avgHeight) // bounding box height <= 4 average height
        {
            filteredWordCCs.add(cc);
        } else {
            if (DEBUG)
                removedWordCCs.add(cc);
        }
    }
    individuallyFilteredWordCCs.clear();
    if (DEBUG) {
        Imgproc.drawContours(debugMat, filteredWordCCs, -1, GREEN, -1);
        Imgproc.drawContours(debugMat, removedWordCCs, -1, PURPLE, -1);
        removedWordCCs.clear();
    }

    // Extract paragraph level connected-components
    mat.setTo(BLACK);
    Imgproc.drawContours(mat, filteredWordCCs, -1, WHITE, -1);
    final List<MatOfPoint> paragraphCCs = new ArrayList<MatOfPoint>();
    Imgproc.morphologyEx(mat, aux, Imgproc.MORPH_CLOSE, KERNEL_30X30);
    Imgproc.findContours(aux, paragraphCCs, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    // Filter paragraph level connected-components according to the word level connected-components inside
    final List<MatOfPoint> textCCs = new ArrayList<MatOfPoint>();
    for (MatOfPoint paragraphCC : paragraphCCs) {
        final List<MatOfPoint> wordCCsInParagraphCC = new ArrayList<MatOfPoint>();
        aux.setTo(BLACK);
        Imgproc.drawContours(aux, Collections.singletonList(paragraphCC), -1, WHITE, -1);
        Core.bitwise_and(mat, aux, aux);
        Imgproc.findContours(aux, wordCCsInParagraphCC, new Mat(), Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);
        final Rect boundingBox = Imgproc.boundingRect(paragraphCC);
        final double center = mat.size().width / 2;
        final double distToCenter = center > boundingBox.x + boundingBox.width
                ? center - boundingBox.x - boundingBox.width
                : center < boundingBox.x ? boundingBox.x - center : 0.0;
        if (DEBUG) {
            System.err.println("****************************************");
            System.err.println("\tArea:                " + boundingBox.area());
            System.err.println("\tDistance to center:  " + distToCenter);
            System.err.println("\tCCs inside:          " + wordCCsInParagraphCC.size());
        }
        if ((wordCCsInParagraphCC.size() >= 10 || wordCCsInParagraphCC.size() >= 0.3 * filteredWordCCs.size())
                && mat.size().width / distToCenter >= 4) {
            textCCs.addAll(wordCCsInParagraphCC);
            if (DEBUG) {
                System.err.println("\tText:                YES");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_GREEN, 5);
            }
        } else {
            if (DEBUG) {
                System.err.println("\tText:                NO");
                Imgproc.drawContours(debugMat, Collections.singletonList(paragraphCC), -1, DARK_RED, 5);
            }
        }
    }
    filteredWordCCs.clear();
    paragraphCCs.clear();
    mat.setTo(WHITE);
    Imgproc.drawContours(mat, textCCs, -1, BLACK, -1);
    textCCs.clear();
    if (DEBUG)
        Image.fromMat(debugMat).write(new File(debugDir, "4_filtering.jpg"));

    // Obtain the final text mask from the filtered connected-components
    Imgproc.erode(mat, mat, KERNEL_15X15);
    Imgproc.morphologyEx(mat, mat, Imgproc.MORPH_OPEN, KERNEL_30X30);
    if (DEBUG)
        Image.fromMat(mat).write(new File(debugDir, "5_text_mask.jpg"));

    // Apply the text mask to the binarized image
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "6_binary.jpg"));
    binary.setTo(WHITE, mat);
    if (DEBUG)
        Image.fromMat(binary).write(new File(debugDir, "7_binary_text.jpg"));

    // Dewarp the text using Leptonica
    Pix pixs = Image.fromMat(binary).toGrayscalePix();
    Pix pixsDewarp = Dewarp.dewarp(pixs, 0, Dewarp.DEFAULT_SAMPLING, 5, true);
    final Image result = Image.fromGrayscalePix(pixsDewarp);
    if (DEBUG)
        result.write(new File(debugDir, "8_dewarp.jpg"));

    // Clean up
    pixs.recycle();
    mat.release();
    aux.release();
    binary.release();
    if (debugMat != null)
        debugMat.release();

    return result;
}

From source file:com.opencv.mouse.MouseMainFrame.java

private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {

            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); //Utility class to convert Mat to Java's BufferedImage

            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Kamera Ak Deil..!");
            } else
                System.out.println("Kamera Ald --> " + webCam.toString());

            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            Mat hsv_image = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {

                }

                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Web Cam Kapal !");
                    }

                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {

                        }
                        // Mat inRangeResim = webcam_image.clone();
                        /*
                        Mat inRangeResim = webcam_image.clone();
                        matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        image =matToBufferedImageConverter.getBufferedImage();
                        Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        */

                        //       MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        // black HSV range 0 0 0 - 180 45 100
                        // HSV blue:   Core.inRange(webcam_image, new Scalar(75,63,40), new Scalar(118,255,255), webcam_image);
                        // RGB blue:   // Core.inRange(webcam_image, new Scalar(50,0,0), new Scalar(255,0,0), webcam_image);
                        // orange HSV: Core.inRange(webcam_image, new Scalar(5,50,50), new Scalar(15,255,255), webcam_image);
                        //Core.inRange(webcam_image, new Scalar(80,50,50), new Scalar(140,255,255), webcam_image);
                        //        Core.inRange(webcam_image, new Scalar(29,0,24), new Scalar(30,155,155), webcam_image);

                        // HSV blue
                        //                       jSliderHmin.setValue(75);
                        //                       jSliderSmin.setValue(63);
                        //                       jSliderVmin.setValue(40);
                        //                       jSliderHmax.setValue(118);
                        //                       jSliderSmax.setValue(255);
                        //                       jSliderVmax.setValue(255);
                        //
                        //                       jSliderHmin.setValue(0);
                        //                       jSliderSmin.setValue(0);
                        //                       jSliderVmin.setValue(0);
                        //                       jSliderHmax.setValue(179);
                        //                       jSliderSmax.setValue(39);
                        //                       jSliderVmax.setValue(120);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);

                        Imgproc.erode(thresholded, thresholded, element);

                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);

                        for (int i = 0; i < contours.size(); i++) {
                            //  System.out.println(Imgproc.contourArea(contours.get(i)));
                            //    if (Imgproc.contourArea(contours.get(i)) > 1 ){
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            //System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height <30 && rect.width < 30 && rect.width >20){
                            //  System.out.println(rect.x +","+rect.y+","+rect.height+","+rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));

                            //}
                            //}
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }

                        }

                        //   Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        //  hsv_image.convertTo(hsv_image, CvType.CV_32F);

                        //   Imgproc.Canny(thresholded, thresholded, 10, 20);
                        //   Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        // this works

                        //    Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        //  Core.bitwise_and(thresholded, webcam_image, webcam_image);

                        //    webcam_image.copyTo(hsv_image, thresholded);
                        //                            System.out.println("<------------------------------>");
                        //                            System.out.println("BGR: " +webcam_image.channels()+"  Size : "+webcam_image.size());
                        //                            System.out.println("HSV :"+hsv_image.channels()+"  Size: "+hsv_image.size());
                        //                            System.out.println("Thresold :"+thresholded.channels()+"  Size : "+thresholded.size());
                        //                            System.out.println("<------------------------------>");
                        //
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");

                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);

                    } else {

                        System.out.println("Grnt yok!");
                        break;
                    }
                }
                //           webCam.release();
            }

        }
    };
    threadDurum = true;
    t.start();
}

From source file:com.ttolley.pongbot.opencv.CvWorker.java

@Override
protected Void doInBackground() throws Exception {
    try {
        //-- 2. Read the video stream  
        Mat webcam_image = new Mat();

        if (capture.isOpened()) {
            while (true) {
                capture.read(webcam_image);
                if (!webcam_image.empty()) {
                    PublishObject publishObject = new PublishObject(webcam_image);
                    for (Map.Entry<FilterType, Filter> entry : filters.entrySet()) {
                        Mat hsv_image = new Mat();
                        Mat thresholded = new Mat();
                        Filter filter = entry.getValue();
                        // One way to select a range of colors by Hue  
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        Core.inRange(hsv_image, filter.hsv_min, filter.hsv_max, thresholded);
                        // Morph open
                        final Size erodeSizeObj = new Size(filter.erodeSize, filter.erodeSize);
                        final Size dilateSizeObj = new Size(filter.dilateSize, filter.dilateSize);
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        // Morph close
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));

                        Mat temp = new Mat();
                        thresholded.copyTo(temp);
                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Mat hierarchy = new Mat();
                        Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_TREE,
                                Imgproc.CHAIN_APPROX_SIMPLE);
                        FilteredObject.Target largestTarget = findTarget(contours, webcam_image, filter);
                        publishObject.addObject(entry.getKey(), new FilteredObject(largestTarget, thresholded));
                    }
                    publish(publishObject);

                } else {
                    System.out.println(" --(!) No captured frame -- Break!");
                    break;
                }
            }
        }
    } catch (Exception ex) {
        System.out.println("Unable to loop");
        System.out.println(getStackTrace(ex));
    }
    return null;
}

From source file:ctPrincipal.Filtros.java

private String minimo(int mx, int my) {
    Mat img = Imgcodecs.imread(imgS);
    Mat one = Mat.ones(mx, my, CvType.CV_32F);
    Imgproc.erode(img, output, one);
    Imgcodecs.imwrite("OutputImg/minimo.jpg", output);
    return "OutputImg/minimo.jpg";
}

From source file:cv.recon.controller.OutputDisplayController.java

License:Open Source License

/**
 * Subtract background using BackgroundSubtractorMOG2
 * @param src Source Mat
 */
private void subtractBackground(Mat src) {
    if (bsmog != null) {
        bsmog.apply(src, fgMask);

        Imgproc.erode(fgMask, fgMask, kernel);
        Imgproc.dilate(fgMask, fgMask, kernel);

        output.setTo(new Scalar(0));
        src.copyTo(output, fgMask);

        if (isFirstFrame) {
            nonZeroCount = 0;
            isFirstFrame = false;
        } else {
            nonZeroCount = Core.countNonZero(fgMask);
        }
        nonZeroLabel.setText("" + nonZeroCount);
    }
}
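
The method above relies on fields initialized elsewhere in the controller (bsmog, fgMask, output, kernel, isFirstFrame, nonZeroCount). A hedged sketch of that setup, assuming the OpenCV 3.x factory Video.createBackgroundSubtractorMOG2() and a 3x3 rectangular kernel for the erode/dilate cleanup:

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.BackgroundSubtractorMOG2;
import org.opencv.video.Video;

// Fields assumed by subtractBackground(); names follow the snippet above
private BackgroundSubtractorMOG2 bsmog;
private Mat fgMask;
private Mat output;
private Mat kernel;
private boolean isFirstFrame = true;
private int nonZeroCount;

private void initBackgroundSubtraction() {
    bsmog = Video.createBackgroundSubtractorMOG2();  // default history and variance threshold
    fgMask = new Mat();
    output = new Mat();
    // Small rectangular element: erode removes speckle noise, dilate restores blob size
    kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
}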

From source file:fi.conf.tabare.ARDataProvider.java

private void detect() {

    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;

    //List<Mat> channels = new LinkedList<>();

    //Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);

                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);

                //Imgproc.equalizeHist(input_image, input_image);

                input_image.convertTo(input_image, -1, params.contrast, params.brightness); //image*contrast[1.0-3.0] + brightness[0-255]

                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {

                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);

                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();

                }

                //               if(background == null) background = input_image.clone();         
                //               if(recaptureBg){
                //                  backgSubstractor.apply(background, background);
                //                  System.out.println(background.channels() + " " + background.size() );
                //                  System.out.println(input_image.channels() + " " + input_image.size() );
                //                  recaptureBg = false;
                //               }
                //               if(dynamicBGRemoval){
                //                  //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //                  //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //                  //Core.subtract(input_image, background, input_image);
                //                  //Core.bitwise_xor(input_image, background, input_image);
                //
                //                  doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get nicer result when substracting
                //                  backgSubstractor.apply(background, background, dynamicBGAmount);
                //               }
                //               if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;

                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;

                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {

                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV
                                            : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {

                        double[] coords = circles.get(0, i);

                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; //If the circle is off the limits, or too small, don't process it.

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);

                        if (tc.isValid())
                            tripcodeData.add(tc);

                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();

                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }

            } else {
                System.out.println("frame/cam failiure!");
            }

        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        //FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }

    }

    //De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}

From source file:gab.opencv.OpenCV.java

License:Open Source License

/**
 * Erode the image. Erosion is a morphological operation (i.e. it affects the shape) often used to
 * remove small white noise from an image. It contracts white areas of the image.
 *
 * See:
 * http://docs.opencv.org/java/org/opencv/imgproc/Imgproc.html#erode(org.opencv.core.Mat, org.opencv.core.Mat, org.opencv.core.Mat)
 * 
 */
public void erode() {
    Imgproc.erode(getCurrentMat(), getCurrentMat(), new Mat());
}
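
In a Processing sketch that uses this library, the call operates on whatever image was last loaded into the OpenCV object. A minimal sketch of that usage (the image path is a placeholder; the OpenCV(this, path) constructor and getOutput() call are assumptions based on the OpenCV for Processing API):

import gab.opencv.*;

OpenCV opencv;

void setup() {
  size(640, 480);
  opencv = new OpenCV(this, "test.jpg"); // image path is a placeholder
  opencv.erode();                        // contracts white regions of the current matrix
  image(opencv.getOutput(), 0, 0);       // draw the eroded result
}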

From source file:gab.opencv.OpenCVProcessingUtils.java

License:Open Source License

public void erode() {
    Imgproc.erode(getCurrentMat(), getCurrentMat(), new Mat());
}

From source file:jarvis.module.colourtracking.ColourTrackingModule.java

@Override
public void run() {
    while (running) {
        // Get webcam image
        if (cap != null)
            cap.read(imageOriginal);
        // Convert to the HSV colour space
        Imgproc.cvtColor(imageOriginal, imageHSV, Imgproc.COLOR_RGB2HSV);
        // Threshold
        Core.inRange(imageHSV, new Scalar(lowH.getValue(), lowS.getValue(), lowV.getValue()),
                new Scalar(highH.getValue(), highS.getValue(), highV.getValue()), imageThresholded);
        // Remove small objects in the foreground 'morphological opening'
        Mat structure = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
        Imgproc.erode(imageThresholded, imageThresholded, structure);
        Imgproc.dilate(imageThresholded, imageThresholded, structure);
        // Remove holes in the foreground 'morphological closing'
        Imgproc.dilate(imageThresholded, imageThresholded, structure);
        Imgproc.erode(imageThresholded, imageThresholded, structure);

        // Get the spatial moment
        //            Moments moments = Imgproc.moments(imageThresholded);
        //            int area = (int)moments.get_m00();

        Imgproc.cvtColor(imageThresholded, imageThresholded, Imgproc.COLOR_GRAY2RGB);
        if (cap != null)
            frame.getContentPane().getGraphics().drawImage(mat2image.getImage(imageThresholded), 0, 50, null);
        if (cap != null)
            frame.repaint();
    }
}
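
The paired erode/dilate calls above implement a morphological opening followed by a closing. The same effect can be written with Imgproc.morphologyEx; a small equivalent helper sketch (the method name is hypothetical):

// Removes small foreground specks, then fills small holes, using one elliptical element
private static void openThenClose(Mat mask) {
    Mat structure = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_OPEN, structure);  // erode then dilate
    Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, structure); // dilate then erode
}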

From source file:kamerka.Filters.java

public void erosion(String sourcePath, int size) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * size + 1, 2 * size + 1));
    Imgproc.erode(source, destination, element);
    Highgui.imwrite(sourcePath, destination);
}
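
A brief usage note: erosion(path, n) builds a (2n+1) x (2n+1) rectangular element and writes the eroded result back to the same file, so a call such as the following (the path is a placeholder) replaces the image with a 5x5 erosion of itself:

new Filters().erosion("C:/images/sample.jpg", 2);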