Example usage for org.opencv.core Mat clone

List of usage examples for org.opencv.core Mat clone

Introduction

On this page you can find example usages for org.opencv.core Mat clone.

Prototype

public Mat clone() 

Source Link

Usage

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

/**
 * Detects the largest blue blob in the current camera frame and returns the
 * width (in pixels) of its bounding rectangle.
 *
 * @return bounding-box width in pixels of the largest detected contour,
 *         or -1 if no frame is available or fewer than two contours are found
 */
public double getCenterVortexWidth() {
    Mat matIn = getCameraMat();
    if (matIn != null) {
        // Fixed: this branch previously logged "mat null" even though it only
        // runs when the mat is NOT null.
        Log.d(TAG, "mat ok");
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

        Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
        // NOTE(review): vortex-specific threshold locals (37,46,34)-(163,255,255)
        // were declared here but never used — inRange below uses the BLUE_*
        // constants. Dead locals removed; confirm whether the vortex thresholds
        // were actually intended for this method.
        Core.inRange(matIn, BLUE_LOWER_THRESH, BLUE_UPPER_THRESH, matMasked);

        // Find the largest contour (the part of the target we are interested in).
        // findContours mutates its input, so run it on a copy of the mask.
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat contourMat = matMasked.clone();
        Imgproc.findContours(contourMat, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);

        // TODO confirm: "> 1" ignores the case of exactly one contour; ">= 1"
        // may have been intended. Left unchanged to preserve behavior.
        if (contours.size() > 1) {
            int largestContourIndex = 0;
            double lastContourArea = 0;
            for (int i = 0; i < contours.size(); i++) {
                double contourArea = Imgproc.contourArea(contours.get(i));
                if (contourArea > lastContourArea) {
                    largestContourIndex = i;
                    lastContourArea = contourArea;
                }
            }
            // Bounding rect of the largest contour; drawn on the frame for debugging.
            Rect boundingRect = Imgproc
                    .boundingRect(new MatOfPoint(contours.get(largestContourIndex).toArray()));
            Core.rectangle(matIn, new Point(boundingRect.x, boundingRect.y),
                    new Point(boundingRect.x + boundingRect.width, boundingRect.y + boundingRect.height),
                    OPEN_CV_GREEN);

            saveMatToDisk(matIn);//debug only

            return boundingRect.width;
        }
    }
    return -1;
}

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

/**
 * Determines which half of the camera frame the largest blue region occupies.
 *
 * @return BLUE_LEFT or BLUE_RIGHT depending on the bounding box position,
 *         or TEST_FAILED when no frame or too few contours are available
 */
public int getBlueSide() {
    Mat matIn = getCameraMat();
    // Guard clause: no frame means nothing to analyze.
    if (matIn == null) {
        return TEST_FAILED;
    }

    Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

    // Threshold the HSV frame into a binary mask of blue pixels.
    Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
    Core.inRange(matIn, BLUE_LOWER_THRESH, BLUE_UPPER_THRESH, matMasked);

    // Extract external contours from a copy (findContours mutates its input).
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(matMasked.clone(), contours, hierarchy, Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    if (contours.size() > 1) {
        // Select the contour with the greatest area.
        int biggestIdx = 0;
        double biggestArea = 0;
        for (int idx = 0; idx < contours.size(); idx++) {
            double area = Imgproc.contourArea(contours.get(idx));
            if (area > biggestArea) {
                biggestArea = area;
                biggestIdx = idx;
            }
        }

        // Draw the bounding box on the frame for debugging purposes.
        Rect boundingRect = Imgproc
                .boundingRect(new MatOfPoint(contours.get(biggestIdx).toArray()));
        Point topLeft = new Point(boundingRect.x, boundingRect.y);
        Point bottomRight = new Point(boundingRect.x + boundingRect.width,
                boundingRect.y + boundingRect.height);
        Core.rectangle(matIn, topLeft, bottomRight, OPEN_CV_GREEN);

        //                saveMatToDisk(matIn);//debug only
        // Decide the side from the box position relative to the frame midline.
        if (boundingRect.x > matIn.cols() / 2) {//depends on which camera we use
            Log.d(TAG, "left");
            return BLUE_LEFT;
        }
        Log.d(TAG, "right");
        return BLUE_RIGHT;
    }

    Log.d(TAG, "countors:" + contours.size());
    return TEST_FAILED;
}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

/**
 * Work in progress: detects the license plate region, shows intermediate
 * results, and segments the plate into individual character images.
 *
 * @return list of character sub-images extracted from the detected plate
 */
public List<Mat> processImagePointBlackBiggerThanPointWhite() {

    // Load the source image from the resources directory and wrap it as a Mat.
    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1: grayscale + Otsu threshold to get a binary image.
    Mat mat = originalMat.createGrayImage();

    ThresholdMat thresholdMat = new ThresholdMat(mat.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat threshold = thresholdMat.createMatResult();

    // Morphological dilation to close gaps before rectangle detection.
    MorphologyMatBase closeMat = new MorphologyMatBase(threshold, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat close = closeMat.dilate();

    // Find candidate plate rectangles in the closed binary image.
    RectangleDetection rectangleDetection = new RectangleDetection(close);

    List<MatOfPoint> contoursDetectPlate = rectangleDetection.executeRectangleDetection();

    // Crop the candidate regions out of the grayscale image.
    SubMat subMatDetectPlate = new SubMat(mat, contoursDetectPlate);

    // Get plate detected
    List<Mat> detectPlates = subMatDetectPlate.dropImage();
    // NOTE(review): hard-coded index 5 — throws IndexOutOfBoundsException if
    // fewer than 6 candidates are found; looks like debug code tuned to one
    // specific test image. Confirm before reuse.
    Mat matDetectPlate = detectPlates.get(5);

    ImageResult imageResult = new ImageResult(matDetectPlate, "Result ");
    imageResult.showResultImage();

    // pre-process
    // NOTE(review): Mat(rows, cols, ...) is the OpenCV argument order, but cols/rows
    // are passed swapped here; harmless in practice because resize() reallocates
    // the destination. Also the target Size uses *1, so this resize keeps the
    // original dimensions — the *2 allocation appears to be leftover intent.
    Mat matResult = new Mat(matDetectPlate.cols() * 2, matDetectPlate.rows() * 2, matDetectPlate.type());
    Imgproc.resize(matDetectPlate, matResult, new Size(matDetectPlate.cols() * 1, matDetectPlate.rows() * 1));

    // Re-threshold the cropped plate for character segmentation.
    ThresholdMat thresholdMatDetectPlate = new ThresholdMat(matResult, 0, 255, Imgproc.THRESH_OTSU);

    Mat thresholdMatDetectPlateMat = thresholdMatDetectPlate.createMatResult();

    ImageResult imageResults = new ImageResult(thresholdMatDetectPlateMat, "Result ");
    imageResults.showResultImage();

    // Segment the binary plate into per-character contours; clone because the
    // segmenter may mutate its input.
    CharacterSegment characterSegment = new CharacterSegment(thresholdMatDetectPlateMat.clone());

    List<MatOfPoint> contoursNumber = characterSegment.executeCharacterSegment();

    System.out.println(contoursNumber.size());
    // Crop each character contour into its own Mat.
    SubMat subMatNumberImg = new SubMat(thresholdMatDetectPlateMat.clone(), contoursNumber);

    List<Mat> listNumberImg = subMatNumberImg.dropImage();

    return listNumberImg;
}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

/**
 * Detects license-plate candidate regions and segments each candidate into
 * character images.
 *
 * @return map from candidate index (as a string) to that candidate's list of
 *         character sub-images
 */
public Map<String, List<Mat>> processImagePointBlackBiggerThanPointWhiteTest() {

    // Load the source image and convert it to a grayscale Mat.
    OriginalImage sourceImage = new OriginalImage(imgUrl);
    BufferedImage buffered = sourceImage.getImageFromResourcesDirectory();
    OriginalMat sourceMat = new OriginalMat(buffered);

    // Step 1: grayscale + Otsu binarization.
    Mat gray = sourceMat.createGrayImage();

    ThresholdMat otsu = new ThresholdMat(gray.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat binary = otsu.createMatResult();

    // Dilate to close small gaps before rectangle detection.
    MorphologyMatBase closer = new MorphologyMatBase(binary, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat closed = closer.dilate();

    // Locate rectangular plate candidates.
    RectangleDetection rectDetector = new RectangleDetection(closed);
    List<MatOfPoint> plateContours = rectDetector.executeRectangleDetection();

    // Crop each candidate region out of the grayscale image.
    SubMat plateCropper = new SubMat(gray, plateContours);
    List<Mat> plateCandidates = plateCropper.dropImage();

    Map<String, List<Mat>> result = new HashMap<String, List<Mat>>();

    for (int i = 0; i < plateCandidates.size(); i++) {
        // Pre-process each candidate: resize (kept at original size here) and
        // re-threshold for character segmentation.
        Mat candidate = plateCandidates.get(i);
        Mat resized = new Mat(candidate.cols() * 2, candidate.rows() * 2, candidate.type());
        Imgproc.resize(candidate, resized,
                new Size(candidate.cols() * 1, candidate.rows() * 1));

        ThresholdMat candidateOtsu = new ThresholdMat(resized, 0, 255, Imgproc.THRESH_OTSU);
        Mat candidateBinary = candidateOtsu.createMatResult();

        // Segment the binary candidate into character contours; clone because
        // the segmenter may mutate its input.
        CharacterSegment segmenter = new CharacterSegment(candidateBinary.clone());
        List<MatOfPoint> characterContours = segmenter.executeCharacterSegment();

        // Crop each character into its own Mat and store under this index.
        SubMat characterCropper = new SubMat(candidateBinary.clone(), characterContours);
        result.put(String.valueOf(i), characterCropper.dropImage());
    }
    return result;
}

From source file:org.lasarobotics.vision.detection.PrimitiveDetection.java

License:Open Source License

/**
 * Locate ellipses within an image//w  w  w .j  av a  2 s.  c  om
 *
 * @param grayImage Grayscale image
 * @return Ellipse locations
 */
/**
 * Locate ellipses within an image.
 *
 * @param grayImage Grayscale image
 * @return Ellipse locations (and all detected contours)
 */
public static EllipseLocationResult locateEllipses(Mat grayImage) {
    // Work on a copy so the caller's image is untouched.
    Mat gray = grayImage.clone();

    // Denoise by halving then doubling the image size.
    Filter.downsample(gray, 2);
    Filter.upsample(gray, 2);

    // Edge-detect, then thicken the edges so contours connect.
    Imgproc.Canny(gray, gray, 5, 75, 3, true);
    Filter.dilate(gray, 2);

    Mat cacheHierarchy = new Mat();

    List<MatOfPoint> contoursTemp = new ArrayList<>();
    //Find contours - the parameters here are very important to compression and retention
    Imgproc.findContours(gray, contoursTemp, cacheHierarchy, Imgproc.CV_RETR_TREE,
            Imgproc.CHAIN_APPROX_TC89_KCOS);

    // Fixed: the original added every contour to 'contours' in TWO separate
    // loops, so each contour appeared twice in the returned list. Each contour
    // is now added exactly once, in the same loop that fits the ellipses.
    List<Contour> contours = new ArrayList<>();
    List<Ellipse> ellipses = new ArrayList<>();
    for (MatOfPoint co : contoursTemp) {
        contours.add(new Contour(co));
        //Contour must have at least 6 points for fitEllipse
        if (co.toArray().length < 6)
            continue;
        //Copy MatOfPoint to MatOfPoint2f
        MatOfPoint2f matOfPoint2f = new MatOfPoint2f(co.toArray());
        //Fit an ellipse to the current contour
        ellipses.add(new Ellipse(Imgproc.fitEllipse(matOfPoint2f)));
    }

    return new EllipseLocationResult(contours, ellipses);
}

From source file:org.lasarobotics.vision.detection.PrimitiveDetection.java

License:Open Source License

/**
 * Locate rectangles in an image//w  ww.ja  va2 s.  co  m
 *
 * @param grayImage Grayscale image
 * @return Rectangle locations
 */
/**
 * Locate rectangles in an image.
 *
 * @param grayImage Grayscale image
 * @return Rectangle locations (and the contours that passed the polygon test)
 */
public RectangleLocationResult locateRectangles(Mat grayImage) {
    // Work on a copy so the caller's image is untouched.
    Mat gray = grayImage.clone();

    //Filter out some noise by halving then doubling size
    Filter.downsample(gray, 2);
    Filter.upsample(gray, 2);

    Mat cacheHierarchy = new Mat();
    Mat grayTemp = new Mat();
    List<Rectangle> rectangles = new ArrayList<>();
    List<Contour> contours = new ArrayList<>();

    //Canny edge detector: writes a black image with gray-scale edge lines
    //into grayTemp. The final 'true' selects the more accurate (but slower)
    //gradient computation.
    Imgproc.Canny(gray, grayTemp, 0, THRESHOLD_CANNY, APERTURE_CANNY, true);
    // Fixed: the dilation previously targeted 'gray' (the untouched source),
    // which had no effect on the edge image actually passed to findContours.
    // Dilating 'grayTemp' thickens the detected edges as the comment intends.
    Filter.dilate(grayTemp, 2);

    List<MatOfPoint> contoursTemp = new ArrayList<>();
    //Find contours in the edge image. CV_RETR_LIST disables the hierarchy;
    //CHAIN_APPROX_SIMPLE compresses each contour to segment endpoints
    //(an upright rectangle becomes just 4 points).
    Imgproc.findContours(grayTemp, contoursTemp, cacheHierarchy, Imgproc.CV_RETR_LIST,
            Imgproc.CHAIN_APPROX_SIMPLE);

    //MatOfPoint2f is a MatOfPoint represented by floats instead of ints
    MatOfPoint2f approx = new MatOfPoint2f();
    //For each contour, test whether the contour is a rectangle
    for (MatOfPoint co : contoursTemp) {
        MatOfPoint2f matOfPoint2f = new MatOfPoint2f(co.toArray());
        Contour c = new Contour(co);

        //Fit the contour to a polygon with fewer vertices; the epsilon
        //(third parameter) controls how aggressively edges are smoothed, and
        //'true' requires the approximation to be a closed polygon.
        Imgproc.approxPolyDP(matOfPoint2f, approx, c.arcLength(true) * EPLISON_APPROX_TOLERANCE_FACTOR, true);

        Contour approxContour = new Contour(approx);

        //Candidate must have exactly 4 points, be big enough, and be closed (convex).
        if (approx.toArray().length == 4 && Math.abs(approxContour.area()) > 1000 && approxContour.isClosed()) {

            //TODO contours and rectangles array may not match up, but why would they?
            contours.add(approxContour);

            //Check each angle to be approximately 90 degrees by taking the
            //maximum cosine over the four corners.
            double maxCosine = 0;
            for (int j = 2; j < 5; j++) {
                double cosine = Math.abs(MathUtil.angle(approx.toArray()[j % 4], approx.toArray()[j - 2],
                        approx.toArray()[j - 1]));
                maxCosine = Math.max(maxCosine, cosine);
            }

            if (maxCosine < MAX_COSINE_VALUE) {
                //Convert the points to a rectangle instance
                rectangles.add(new Rectangle(approx.toArray()));
            }
        }
    }

    return new RectangleLocationResult(contours, rectangles);
}

From source file:org.lasarobotics.vision.util.color.Color.java

License:Open Source License

/**
 * Convert a matrix in one color space to another
 *
 * @param in       Input matrix// w  w w .ja va2s  .c om
 * @param spaceIn  Input colorspace
 * @param spaceOut Output colorspace
 * @return Matrix in output colorspace
 */
/**
 * Convert a matrix in one color space to another.
 *
 * @param in       Input matrix
 * @param spaceIn  Input colorspace
 * @param spaceOut Output colorspace
 * @return Matrix in output colorspace (the input matrix itself when the
 *         spaces are equal; otherwise a new matrix)
 * @throws IllegalArgumentException if no conversion path exists or a
 *                                  conversion step fails
 */
public static Mat convertColorMat(Mat in, ColorSpace spaceIn, ColorSpace spaceOut) {
    if (spaceIn == spaceOut)
        return in;
    if (!spaceIn.canConvertTo(spaceOut))
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");

    Mat output = in.clone();

    // Hoisted: the original re-invoked getConversionsTo three times per
    // iteration (plus once in the loop condition).
    int[] conversions = spaceIn.getConversionsTo(spaceOut);

    try {
        // Conversions are encoded as triples: (cvtColor code, input dims, output dims).
        for (int i = 0; i < conversions.length; i += 3) {
            int conversion = conversions[i];
            // conversions[i + 1] (input dimension) is unused: cvtColor infers
            // the input channel count from the Mat itself.
            int outputDim = conversions[i + 2];

            Imgproc.cvtColor(output, output, conversion, outputDim);
        }
    } catch (Exception e) {
        // Fixed: preserve the original failure as the cause instead of
        // silently dropping it.
        throw new IllegalArgumentException("Cannot convert color to the desired color space.", e);
    }

    return output;
}

From source file:org.openpnp.machine.reference.ReferenceCamera.java

License:Open Source License

/**
 * Translates the image by the given pixel offsets via an affine warp.
 * Releases the input mat when a new translated mat is produced; when both
 * offsets are zero the input is returned unchanged (and NOT released).
 *
 * @param mat     source image (released unless returned as-is)
 * @param offsetX horizontal shift in pixels
 * @param offsetY vertical shift in pixels
 * @return the translated image, or the original when no shift is requested
 */
private Mat offset(Mat mat, int offsetX, int offsetY) {
    if (offsetX == 0D && offsetY == 0D) {
        return mat;
    }

    // 2x3 affine matrix for a pure translation:
    // | 1 0 offsetX |
    // | 0 1 offsetY |
    Mat mapMatrix = new Mat(2, 3, CvType.CV_32F);
    mapMatrix.put(0, 0, 1, 0, offsetX);
    mapMatrix.put(1, 0, 0, 1, offsetY);

    Mat dst = mat.clone();
    Imgproc.warpAffine(mat, dst, mapMatrix, mat.size(), Imgproc.INTER_LINEAR);
    mat.release();
    mapMatrix.release();

    return dst;
}

From source file:org.openpnp.machine.reference.ReferenceCamera.java

License:Open Source License

/**
 * Applies lens-distortion correction to the given frame using the stored
 * camera calibration. Returns the frame unchanged when calibration is
 * disabled; otherwise releases the input mat and returns a corrected copy.
 * The undistortion maps are computed lazily on first use and cached in
 * instance fields (sized to the first frame seen — NOTE(review): if the
 * frame size later changes, the cached maps would be stale).
 */
private Mat undistort(Mat mat) {
    if (!calibration.isEnabled()) {
        return mat;
    }

    // Lazily build the remap lookup tables from the calibration parameters.
    if (undistortionMap1 == null || undistortionMap2 == null) {
        undistortionMap1 = new Mat();
        undistortionMap2 = new Mat();
        Mat rectification = Mat.eye(3, 3, CvType.CV_32F);
        Imgproc.initUndistortRectifyMap(calibration.getCameraMatrixMat(),
                calibration.getDistortionCoefficientsMat(), rectification, calibration.getCameraMatrixMat(),
                mat.size(), CvType.CV_32FC1, undistortionMap1, undistortionMap2);
        rectification.release();
    }

    // Remap into a new mat; the caller's input is released here, so the
    // returned mat replaces it.
    Mat dst = mat.clone();
    Imgproc.remap(mat, dst, undistortionMap1, undistortionMap2, Imgproc.INTER_LINEAR);
    mat.release();

    return dst;
}

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain a
 * Location in camera coordinates. The results are sorted best score to
 * worst score.
 *
 * @param template the template image to search for
 * @return matches sorted by descending score
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    // Normalized cross-correlation: resultMat holds a score per candidate
    // top-left position of the template within the image.
    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    // When debug logging is on, draw each match onto a copy of the frame.
    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    // Accept only local maxima that score at least 'threshold' absolutely
    // AND at least 'corr' of the best score found in this frame.
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        // Score is normalized relative to the best match in this frame.
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()), new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        // Convert the match's template-center pixel position into a machine
        // Location relative to the camera's current location.
        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2), y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    // Sort best score first (descending).
    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}