Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find example usage for the org.opencv.core.Mat.get method.

Prototype

public double[] get(int row, int col) 
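
get(row, col) returns the element at that position as a double[] holding one value per channel. A minimal sketch of that contract (not taken from the sources below; it assumes the OpenCV native library has already been loaded):

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatGetSketch {
    public static void main(String[] args) {
        // Assumes System.loadLibrary(Core.NATIVE_LIBRARY_NAME) has run elsewhere.
        Mat bgr = new Mat(2, 2, CvType.CV_8UC3, new Scalar(10, 20, 30));

        // One value per channel: {blue, green, red} for a CV_8UC3 matrix.
        double[] pixel = bgr.get(0, 0);
        System.out.println(pixel.length);  // 3
        System.out.println(pixel[0]);      // 10.0

        // Single-channel matrices still return an array, of length 1.
        Mat gray = Mat.eye(3, 3, CvType.CV_64FC1);
        System.out.println(gray.get(0, 0)[0]);  // 1.0
    }
}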

Usage

From source file: com.projectcs2103t.openglestest.OpenGLES20Activity.java

License: Apache License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    float projection[] = mCameraProjectionAdapter.getProjectionGL();
    Mat CameraMat = mCameraProjectionAdapter.getCVCameraMat();
    Mat DistortionMat = mCameraProjectionAdapter.getCVDistortionMat();
    Mat ModelViewMat = new Mat(4, 4, CvType.CV_64FC1);
    int detected = nl.processFrame(rgba.getNativeObjAddr(), CameraMat.getNativeObjAddr(),
            DistortionMat.getNativeObjAddr(), ModelViewMat.getNativeObjAddr());
    float mGLModelView[] = null;
    if (detected == 1) {
        mGLModelView = new float[16];
        mGLModelView[0] = (float) ModelViewMat.get(0, 0)[0];
        mGLModelView[1] = (float) ModelViewMat.get(0, 1)[0];
        mGLModelView[2] = (float) ModelViewMat.get(0, 2)[0];
        mGLModelView[3] = (float) ModelViewMat.get(0, 3)[0];
        mGLModelView[4] = (float) ModelViewMat.get(1, 0)[0];
        mGLModelView[5] = (float) ModelViewMat.get(1, 1)[0];
        mGLModelView[6] = (float) ModelViewMat.get(1, 2)[0];
        mGLModelView[7] = (float) ModelViewMat.get(1, 3)[0];
        mGLModelView[8] = (float) ModelViewMat.get(2, 0)[0];
        mGLModelView[9] = (float) ModelViewMat.get(2, 1)[0];
        mGLModelView[10] = (float) ModelViewMat.get(2, 2)[0];
        mGLModelView[11] = (float) ModelViewMat.get(2, 3)[0];
        mGLModelView[12] = (float) ModelViewMat.get(3, 0)[0];
        mGLModelView[13] = (float) ModelViewMat.get(3, 1)[0];
        mGLModelView[14] = (float) ModelViewMat.get(3, 2)[0];
        mGLModelView[15] = (float) ModelViewMat.get(3, 3)[0];
        //showMatrices(rgba, ModelViewMat);
    }
    mCameraProjectionAdapter.setModelViewGL(mGLModelView);
    Imgproc.putText(rgba, mCameraProjectionAdapter.toString(), new Point(50, 50), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mGLView.toString(), new Point(50, 75), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    return rgba;
}
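
The sixteen element-by-element reads above could equally be written as a loop over get(row, col). A minimal sketch of such a helper (hypothetical, not part of the original activity), assuming a 4x4 CV_64FC1 matrix such as ModelViewMat:

// Hypothetical helper: flatten a 4x4 CV_64FC1 Mat into a row-major float[16].
private static float[] toFloat16(Mat m) {
    final float[] out = new float[16];
    for (int row = 0; row < 4; row++) {
        for (int col = 0; col < 4; col++) {
            // get(row, col) returns a double[] with one entry per channel;
            // a CV_64FC1 matrix has exactly one.
            out[row * 4 + col] = (float) m.get(row, col)[0];
        }
    }
    return out;
}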

From source file: com.projectcs2103t.openglestest.OpenGLES20Activity.java

License: Apache License

private void showMatrices(Mat rgba, Mat mat) {
    double m00 = mat.get(0, 0)[0];
    double m01 = mat.get(0, 1)[0];
    double m02 = mat.get(0, 2)[0];
    double m03 = mat.get(0, 3)[0];
    double m10 = mat.get(1, 0)[0];
    double m11 = mat.get(1, 1)[0];
    double m12 = mat.get(1, 2)[0];
    double m13 = mat.get(1, 3)[0];
    double m20 = mat.get(2, 0)[0];
    double m21 = mat.get(2, 1)[0];
    double m22 = mat.get(2, 2)[0];
    double m23 = mat.get(2, 3)[0];
    double m30 = mat.get(3, 0)[0];
    double m31 = mat.get(3, 1)[0];
    double m32 = mat.get(3, 2)[0];
    double m33 = mat.get(3, 3)[0];
    //String camMatStr = cameraMat.dump();
    String mRow0 = "|" + m00 + "," + m01 + "," + m02 + "," + m03 + "|";
    String mRow1 = "|" + m10 + "," + m11 + "," + m12 + "," + m13 + "|";
    String mRow2 = "|" + m20 + "," + m21 + "," + m22 + "," + m23 + "|";
    String mRow3 = "|" + m30 + "," + m31 + "," + m32 + "," + m33 + "|";
    Imgproc.putText(rgba, "Model-View-Mat:", new Point(50, 100), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mRow0, new Point(50, 125), Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mRow1, new Point(50, 150), Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mRow2, new Point(50, 175), Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mRow3, new Point(50, 200), Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(0, 255, 0));

}

From source file: com.seleniumtests.util.imaging.ImageDetector.java

License: Apache License

/**
 * Computes the rectangle where the searched picture is located and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for information, but OpenCV 3 no longer includes SURF in its Java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {

    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

private Optional<Long> checkForFrameChange(BufferedImage frame) {
    Mat mat;

    synchronized (frame) {
        undistortFrame(frame);
        mat = Camera.bufferedImageToMat(frame);
    }

    final double[] pixel = getFrameDelayPixel(mat);

    // Initialize
    if (patternLuminosity[0] == -1) {
        patternLuminosity = pixel;
        return Optional.empty();
    }

    final Mat tempMat = new Mat(1, 2, CvType.CV_8UC3);
    tempMat.put(0, 0, patternLuminosity);
    tempMat.put(0, 1, pixel);

    Imgproc.cvtColor(tempMat, tempMat, Imgproc.COLOR_BGR2HSV);

    if (tempMat.get(0, 1)[2] < .9 * tempMat.get(0, 0)[2]) {
        return Optional.of(cameraManager.getCurrentFrameTimestamp() - frameTimestampBeforeFrameChange);
    }

    return Optional.empty();
}

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

private double[] getFrameDelayPixel(Mat mat) {
    final double squareHeight = boundsResult.getHeight() / (double) (PATTERN_HEIGHT + 1);
    final double squareWidth = boundsResult.getWidth() / (double) (PATTERN_WIDTH + 1);

    final int secondSquareCenterX = (int) (boundsResult.getMinX() + (squareWidth * 1.5));
    final int secondSquareCenterY = (int) (boundsResult.getMinY() + (squareHeight * .5));

    return mat.get(secondSquareCenterY, secondSquareCenterX);
}
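
Note the argument order on the last line: get takes (row, col), so the y coordinate is passed first. A minimal sketch of the same pattern as a hypothetical helper, assuming a multi-channel BGR frame:

// Hypothetical helper: read one pixel addressed by (x, y) screen coordinates.
private static double[] pixelAt(Mat frame, int x, int y) {
    // Mat is addressed as (row, col), i.e. (y, x).
    return frame.get(y, x);  // e.g. {blue, green, red} for a CV_8UC3 frame
}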

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

public Optional<Bounds> calibrateFrame(MatOfPoint2f boardCorners, Mat mat) {

    // For debugging
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        traceMat = mat.clone();
    }

    initializeSize(mat.cols(), mat.rows());

    // Step 2: Estimate the pattern corners
    MatOfPoint2f estimatedPatternRect = estimatePatternRect(traceMat, boardCorners);

    // Step 3: Use Hough Lines to find the actual corners
    final Optional<MatOfPoint2f> idealCorners = findIdealCorners(mat, estimatedPatternRect);

    if (!idealCorners.isPresent())
        return Optional.empty();

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-dist.png");
        final File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Step 4: Initialize the warp matrix and bounding box
    initializeWarpPerspective(mat, idealCorners.get());

    if (boundingBox.getMinX() < 0 || boundingBox.getMinY() < 0
            || boundingBox.getWidth() > cameraManager.getFeedWidth()
            || boundingBox.getHeight() > cameraManager.getFeedHeight()) {
        return Optional.empty();
    }

    if (logger.isDebugEnabled())
        logger.debug("bounds {} {} {} {}", boundingBox.getMinX(), boundingBox.getMinY(), boundingBox.getWidth(),
                boundingBox.getHeight());

    final Mat undistorted = warpPerspective(mat);

    if (logger.isTraceEnabled()) {

        String filename = String.format("calibrate-undist.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistorted);

        Mat undistortedCropped = undistorted.submat((int) boundingBox.getMinY(), (int) boundingBox.getMaxY(),
                (int) boundingBox.getMinX(), (int) boundingBox.getMaxX());

        filename = String.format("calibrate-undist-cropped.png");
        file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, undistortedCropped);
    }

    Mat warpedBoardCorners = warpCorners(boardCorners);

    isCalibrated = true;

    if (calculateFrameDelay) {
        findColors(undistorted, warpedBoardCorners);

        final double squareHeight = boundingBox.getHeight() / (double) (PATTERN_HEIGHT + 1);
        final double squareWidth = boundingBox.getWidth() / (double) (PATTERN_WIDTH + 1);

        int secondSquareCenterX = (int) (boundingBox.getMinX() + (squareWidth * 1.5));
        int secondSquareCenterY = (int) (boundingBox.getMinY() + (squareHeight * .5));

        if (logger.isDebugEnabled())
            logger.debug("pF getFrameDelayPixel x {} y {} p {}", secondSquareCenterX, secondSquareCenterY,
                    undistorted.get(secondSquareCenterY, secondSquareCenterX));

    }

    return Optional.of(boundingBox);
}

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

private Optional<MatOfPoint2f> findIdealCorners(final Mat frame, final MatOfPoint2f estimatedPatternRect) {
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        Mat traceMatTemp = frame.clone();
        traceMat = new Mat();

        Imgproc.cvtColor(traceMatTemp, traceMat, Imgproc.COLOR_GRAY2BGR);
    }

    // pixel distance, dynamic because we want to allow any resolution or
    // distance from pattern
    final int toleranceThreshold = (int) (minimumDimension / (double) (PATTERN_HEIGHT - 1) / 1.5);

    // Grey scale conversion.
    //final Mat grey = new Mat();
    //Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);
    final Mat grey = frame;

    // Find edges
    Imgproc.Canny(grey, grey, CANNY_THRESHOLD_1, CANNY_THRESHOLD_2);

    // Blur the lines, otherwise the lines algorithm does not consider them
    Imgproc.GaussianBlur(grey, grey, gaussianBlurSize, GAUSSIANBLUR_SIGMA);

    if (logger.isTraceEnabled()) {
        logger.trace("tolerance threshold {} minimumDimension {}", toleranceThreshold, minimumDimension);

        String filename = String.format("calibrate-undist-grey-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, grey);
    }

    if (logger.isDebugEnabled())
        logger.debug("estimation {} {} {} {}", estimatedPatternRect.get(0, 0), estimatedPatternRect.get(1, 0),
                estimatedPatternRect.get(2, 0), estimatedPatternRect.get(3, 0));

    // Easier to work off of Points
    final Point[] estimatedPoints = matOfPoint2fToPoints(estimatedPatternRect);

    if (logger.isTraceEnabled()) {
        Core.circle(traceMat, estimatedPoints[0], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[1], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[2], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[3], 1, new Scalar(0, 0, 255), -1);
    }

    // Find lines
    // These parameters are just guesswork right now
    final Mat mLines = new Mat();
    final int minLineSize = (int) (minimumDimension * .90);
    final int lineGap = toleranceThreshold;

    // Do it
    Imgproc.HoughLinesP(grey, mLines, HOUGHLINES_RHO, HOUGHLINES_THETA, HOUGHLINES_THRESHOLD, minLineSize,
            lineGap);

    // Find the lines that match our estimates
    final Set<double[]> verifiedLines = new HashSet<double[]>();

    for (int x = 0; x < mLines.cols(); x++) {
        final double[] vec = mLines.get(0, x);
        final double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        final Point start = new Point(x1, y1);
        final Point end = new Point(x2, y2);

        if (nearPoints(estimatedPoints, start, toleranceThreshold)
                && nearPoints(estimatedPoints, end, toleranceThreshold)) {
            verifiedLines.add(vec);

            if (logger.isTraceEnabled()) {
                Core.line(traceMat, start, end, new Scalar(255, 0, 0), 1);
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("verifiedLines: {}", verifiedLines.size());

    // Reduce the lines to possible corners
    final Set<Point> possibleCorners = new HashSet<Point>();

    for (double[] line1 : verifiedLines) {
        for (double[] line2 : verifiedLines) {
            if (line1 == line2)
                continue;

            Optional<Point> intersection = computeIntersect(line1, line2);

            if (intersection.isPresent())
                possibleCorners.add(intersection.get());
        }
    }

    // Reduce the possible corners to ideal corners
    Point[] idealCorners = new Point[4];
    final double[] idealDistances = { toleranceThreshold, toleranceThreshold, toleranceThreshold,
            toleranceThreshold };

    for (Point pt : possibleCorners) {
        for (int i = 0; i < 4; i++) {
            final double distance = euclideanDistance(pt, estimatedPoints[i]);

            if (distance < idealDistances[i]) {
                idealDistances[i] = distance;
                idealCorners[i] = pt;
            }
        }
    }

    if (logger.isTraceEnabled()) {
        logger.trace("idealDistances {} {} {} {}", idealDistances[0], idealDistances[1], idealDistances[2],
                idealDistances[3]);

        String filename = String.format("calibrate-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Verify that we have the corners we need
    for (Point pt : idealCorners) {
        if (pt == null)
            return Optional.empty();

        if (logger.isTraceEnabled()) {
            logger.trace("idealCorners {}", pt);
            Core.circle(traceMat, pt, 1, new Scalar(0, 255, 255), -1);
        }
    }

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-lines-with-corners.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Sort them into the correct order
    // 1st-------2nd
    // |           |
    // |           |
    // |           |
    // 3rd-------4th
    idealCorners = sortCorners(idealCorners);

    // Build the MatOfPoint2f
    final MatOfPoint2f sourceCorners = new MatOfPoint2f();
    sourceCorners.alloc(4);

    for (int i = 0; i < 4; i++) {
        sourceCorners.put(i, 0, new double[] { idealCorners[i].x, idealCorners[i].y });
    }

    return Optional.of(sourceCorners);
}

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

private Point findChessBoardSquareCenter(Mat corners, int row, int col) {
    if (row >= PATTERN_HEIGHT - 1 || col >= PATTERN_WIDTH - 1) {
        logger.warn("findChessBoardSquareColor invalid row or col {} {}", row, col);
        return null;
    }

    final Point topLeft = new Point(corners.get((row * PATTERN_WIDTH - 1) + col, 0)[0],
            corners.get((row * PATTERN_WIDTH - 1) + col, 0)[1]);
    final Point bottomRight = new Point(corners.get(((row + 1) * PATTERN_WIDTH - 1) + col + 1, 0)[0],
            corners.get(((row + 1) * PATTERN_WIDTH - 1) + col + 1, 0)[1]);

    final Point result = new Point((topLeft.x + bottomRight.x) / 2, (topLeft.y + bottomRight.y) / 2);

    if (logger.isTraceEnabled()) {
        logger.trace("findChessBoardSquareColor {}", corners.size());

        logger.trace("findChessBoardSquareColor {} {}", (row * PATTERN_WIDTH - 1) + col,
                ((row + 1) * PATTERN_WIDTH - 1) + col + 1);
        logger.trace("findChessBoardSquareColor {} {} {}", topLeft, bottomRight, result);
    }

    return result;
}

From source file: com.shootoff.camera.autocalibration.AutoCalibrationManager.java

License: Open Source License

private Point rotPoint(final Mat rot_mat, final Point point) {
    final Point rp = new Point();
    rp.x = rot_mat.get(0, 0)[0] * point.x + rot_mat.get(0, 1)[0] * point.y + rot_mat.get(0, 2)[0];
    rp.y = rot_mat.get(1, 0)[0] * point.x + rot_mat.get(1, 1)[0] * point.y + rot_mat.get(1, 2)[0];

    return rp;
}
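
rotPoint reads the six entries of a 2x3 affine matrix element by element with get(row, col). A minimal usage sketch, assuming the matrix comes from Imgproc.getRotationMatrix2D (which returns a 2x3 CV_64FC1 Mat) and that rotPoint is reachable from the calling code:

// Rotate a point 90 degrees counter-clockwise around (100, 100).
Mat rot = Imgproc.getRotationMatrix2D(new Point(100, 100), 90, 1.0);
Point rotated = rotPoint(rot, new Point(150, 100));  // approximately (100, 50)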

From source file: com.untref.bordes.HoughCirculos.java

public static BufferedImage implementarCiculos(BufferedImage screen, int acumulador, int radioMin,
        int radioMax) {
    Mat source = new Mat(screen.getHeight(), screen.getWidth(), CvType.CV_8UC3);
    byte[] data = ((DataBufferByte) screen.getRaster().getDataBuffer()).getData();
    source.put(0, 0, data);
    //ImageIO.write(screen, "jpg", "imagen");
    //Mat source = Highgui.imread("test.jpg", Highgui.CV_LOAD_IMAGE_COLOR);
    Mat destination = new Mat(source.rows(), source.cols(), source.type());

    Imgproc.cvtColor(source, destination, Imgproc.COLOR_RGB2GRAY);

    Imgproc.GaussianBlur(destination, destination, new Size(3, 3), 0, 0);

    Mat circles = new Mat();
    Imgproc.HoughCircles(destination, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 30, 10, acumulador, radioMin,
            radioMax);

    int radius;
    org.opencv.core.Point pt;
    for (int x = 0; x < circles.cols(); x++) {
        double vCircle[] = circles.get(0, x);

        if (vCircle == null) {
            break;
        }

        pt = new org.opencv.core.Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
        radius = (int) Math.round(vCircle[2]);

        // draw the found circle
        Core.circle(source, pt, radius, new Scalar(150, 0, 0), 2);
        Core.circle(source, pt, 1, new Scalar(0, 0, 0), 2);
    }
    BufferedImage res = matToBufferedImage(source);

    return res;

}
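
Each detected circle occupies one column of the circles Mat, so circles.get(0, x) yields a three-element array {centerX, centerY, radius}, which is what the loop above unpacks. A minimal call sketch with hypothetical parameter values, assuming inputImage is a TYPE_3BYTE_BGR BufferedImage so the DataBufferByte cast in the method succeeds:

// Hypothetical values: accumulator threshold 100, radii between 10 and 80 pixels.
BufferedImage annotated = HoughCirculos.implementarCiculos(inputImage, 100, 10, 80);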