List of usage examples for org.opencv.core.MatOfPoint2f
public MatOfPoint2f()
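A minimal sketch of constructing an empty MatOfPoint2f and populating it afterwards (the class name and point values are arbitrary, not taken from the examples below):

import java.util.Arrays;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class MatOfPoint2fBasics {
    public static void main(String[] args) {
        System.loadLibrary(org.opencv.core.Core.NATIVE_LIBRARY_NAME);

        // Empty 2-channel float matrix of points, filled after construction
        MatOfPoint2f quad = new MatOfPoint2f();
        quad.fromArray(new Point(0, 0), new Point(100, 0), new Point(100, 50), new Point(0, 50));

        // fromList is the equivalent entry point for a List<Point>
        MatOfPoint2f sameQuad = new MatOfPoint2f();
        sameQuad.fromList(Arrays.asList(new Point(0, 0), new Point(100, 0),
                new Point(100, 50), new Point(0, 50)));

        System.out.println(quad.rows() + " points of type " + quad.type()); // 4 points, CV_32FC2
    }
}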
From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java
License:Open Source License
public List<Point> extractPoints() {
    Mat gray = new Mat(); //work_width, work_height, CvType.CV_8UC1);
    Mat binary = new Mat();
    Mat kernel = Mat.ones(3, 3, CvType.CV_8UC1);

    debugreset();

    Mat image = load_transformed();
    working_image = image.clone();
    debugsave(image, "source");

    Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
    debugsave(gray, "grayscale");

    Imgproc.GaussianBlur(gray, gray, new Size(15, 15), 0);
    debugsave(gray, "blurred");

    //Imgproc.equalizeHist(gray, gray);
    //debugsave(gray, "equalized");

    Imgproc.adaptiveThreshold(gray, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV,
            129, 5);
    //Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    //Imgproc.threshold(gray, binary, 128, 255, Imgproc.THRESH_BINARY_INV);
    debugsave(binary, "binary");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_CLOSE, kernel);
    debugsave(binary, "closed");

    Imgproc.morphologyEx(binary, binary, Imgproc.MORPH_OPEN, kernel);
    debugsave(binary, "opened");

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(binary, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    // note: binary has been modified by findContours at this point
    Imgproc.drawContours(image, contours, -1, new Scalar(0, 0, 255), 3);
    debugsave(image, "contours");

    List<PointAndArea> points = new ArrayList<>();

    for (MatOfPoint cnt : contours) {
        MatOfPoint2f c2f = new MatOfPoint2f();
        c2f.fromArray(cnt.toArray());
        RotatedRect rr = Imgproc.minAreaRect(c2f);

        double area = Imgproc.contourArea(cnt);

        if (rr.size.width / rr.size.height < 3 && rr.size.height / rr.size.width < 3 && rr.size.width < 64
                && rr.size.height < 64 && area > 9 && area < 10000) {
            points.add(new PointAndArea((int) area, rr.center));
        }
    }

    List<Point> final_points = new ArrayList<>();

    Collections.sort(points);
    Collections.reverse(points);
    int prev = -1;
    for (PointAndArea p : points) {
        Log.i("area", Integer.toString(p.area));
        if (prev == -1 || p.area >= prev / 2) {
            prev = p.area;
            Imgproc.circle(image, p.point, 10, new Scalar(0, 255, 0), 5);
            final_points.add(new Point(1 - p.point.y / work_height, 1 - p.point.x / work_width));
        }
    }
    debugsave(image, "circles");

    return final_points;
}
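As the loop above shows, contours from Imgproc.findContours arrive as integer MatOfPoint objects and must be converted to MatOfPoint2f before calling float-only functions such as minAreaRect. A minimal hedged sketch of the same conversion feeding Imgproc.approxPolyDP (the method name, the 0.02 factor, and the variable names are illustrative, not from the example above):

// Assumes: import org.opencv.core.MatOfPoint; import org.opencv.core.MatOfPoint2f; import org.opencv.imgproc.Imgproc;
static MatOfPoint2f approximateContour(MatOfPoint contour) {
    // findContours yields integer points; copy them into the float container
    MatOfPoint2f contour2f = new MatOfPoint2f();
    contour2f.fromArray(contour.toArray());

    // arcLength and approxPolyDP both require MatOfPoint2f
    double epsilon = 0.02 * Imgproc.arcLength(contour2f, true);
    MatOfPoint2f approx = new MatOfPoint2f();
    Imgproc.approxPolyDP(contour2f, approx, epsilon, true);
    return approx;
}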
From source file:com.github.rosjava_catkin_package_a.ARLocROS.ComputePose.java
License:Apache License
public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            // note: vertex2d[2].y on the next line looks like a typo in the original source
            // (vertex2d[1].y was probably intended)
            p = new Point(vertex2d[1].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            final MatOfPoint mop = new MatOfPoint();
            mop.fromList(points2dlist);
            final List<MatOfPoint> pts = new ArrayList<>();
            pts.add(mop);

            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));
            // draw red rectangle around detected marker
            Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                    new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
        }
    }

    // load 2D and 3D points into Mats for solvePnP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.size() == 0) {
        return false;
    }

    // RANSAC-based pose estimation (can shake more than plain solvePnP)
    Mat inliers = new Mat();
    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ARLoc.getLog().info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());

    // avoid publishing a zero pose if localization failed
    if (inliers.rows() == 0) {
        return false;
    }

    return true;
}
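For comparison, the plain (non-RANSAC) Calib3d.solvePnP call consumes the same MatOfPoint3f/MatOfPoint2f pair. A hedged sketch; the helper signature and the MatOfDouble parameter type for the distortion coefficients are assumptions, not part of the project above:

// Assumes: import org.opencv.calib3d.Calib3d; import org.opencv.core.*;
static boolean estimatePose(MatOfPoint3f objectPoints, MatOfPoint2f imagePoints,
        Mat cameraMatrix, MatOfDouble distCoeffs, Mat rvec, Mat tvec) {
    if (imagePoints.empty()) {
        return false; // same guard as above: never call solvePnP with no detected points
    }
    return Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
}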
From source file:com.jonwohl.Attention.java
License:Open Source License
private Mat getPerspectiveTransformation(ArrayList<PVector> inputPoints, int w, int h) {
    Point[] canonicalPoints = new Point[4];
    canonicalPoints[0] = new Point(0, 0);
    canonicalPoints[1] = new Point(w, 0);
    canonicalPoints[2] = new Point(w, h);
    canonicalPoints[3] = new Point(0, h);

    MatOfPoint2f canonicalMarker = new MatOfPoint2f();
    canonicalMarker.fromArray(canonicalPoints);

    Point[] points = new Point[4];
    for (int i = 0; i < 4; i++) {
        points[i] = new Point(inputPoints.get(i).x, inputPoints.get(i).y);
    }
    MatOfPoint2f marker = new MatOfPoint2f(points);
    return Imgproc.getPerspectiveTransform(marker, canonicalMarker);
}
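The 3x3 matrix returned above would typically be fed to Imgproc.warpPerspective to rectify the marker region. A minimal sketch under that assumption (the method name and the src image are placeholders):

// Assumes: import org.opencv.core.Mat; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc;
static Mat rectify(Mat src, Mat perspectiveTransform, int w, int h) {
    Mat warped = new Mat();
    // Map the source quadrilateral onto a w x h canonical image
    Imgproc.warpPerspective(src, warped, perspectiveTransform, new Size(w, h));
    return warped;
}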
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Compute the rectangle where the searched picture is and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore for the Java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // points of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework the scene points; now we are sure the object rotation is 0, 90, 180 or 270 degrees
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that the aspect ratios of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private MatOfPoint2f estimateFullPatternSize(MatOfPoint2f rect) {
    // Result Mat
    final MatOfPoint2f result = new MatOfPoint2f();
    result.alloc(4);

    // Get the sources as points
    final Point topLeft = new Point(rect.get(0, 0)[0], rect.get(0, 0)[1]);
    final Point topRight = new Point(rect.get(1, 0)[0], rect.get(1, 0)[1]);
    final Point bottomRight = new Point(rect.get(2, 0)[0], rect.get(2, 0)[1]);
    final Point bottomLeft = new Point(rect.get(3, 0)[0], rect.get(3, 0)[1]);

    // We need the heights and widths to estimate the square sizes
    final double topWidth = Math
            .sqrt(Math.pow(topRight.x - topLeft.x, 2) + Math.pow(topRight.y - topLeft.y, 2));
    final double leftHeight = Math
            .sqrt(Math.pow(bottomLeft.x - topLeft.x, 2) + Math.pow(bottomLeft.y - topLeft.y, 2));
    final double bottomWidth = Math
            .sqrt(Math.pow(bottomRight.x - bottomLeft.x, 2) + Math.pow(bottomRight.y - bottomLeft.y, 2));
    final double rightHeight = Math
            .sqrt(Math.pow(bottomRight.x - topRight.x, 2) + Math.pow(bottomRight.y - topRight.y, 2));

    if (logger.isTraceEnabled()) {
        logger.trace("points {} {} {} {}", topLeft, topRight, bottomRight, bottomLeft);

        double angle = Math.atan((topRight.y - topLeft.y) / (topRight.x - topLeft.x)) * 180 / Math.PI;
        double angle2 = Math.atan((bottomRight.y - bottomLeft.y) / (bottomRight.x - bottomLeft.x)) * 180
                / Math.PI;

        logger.trace("square size {} {} - angle {}", topWidth / (PATTERN_WIDTH - 1),
                leftHeight / (PATTERN_HEIGHT - 1), angle);
        logger.trace("square size {} {} - angle {}", bottomWidth / (PATTERN_WIDTH - 1),
                rightHeight / (PATTERN_HEIGHT - 1), angle2);
    }

    // Estimate the square widths, that is what we base the estimate of the real corners on
    double squareTopWidth = (1 + BORDER_FACTOR) * (topWidth / (PATTERN_WIDTH - 1));
    double squareLeftHeight = (1 + BORDER_FACTOR) * (leftHeight / (PATTERN_HEIGHT - 1));
    double squareBottomWidth = (1 + BORDER_FACTOR) * (bottomWidth / (PATTERN_WIDTH - 1));
    double squareRightHeight = (1 + BORDER_FACTOR) * (rightHeight / (PATTERN_HEIGHT - 1));

    // The estimations
    double[] newTopLeft = { topLeft.x - squareTopWidth, topLeft.y - squareLeftHeight };
    double[] newBottomLeft = { bottomLeft.x - squareBottomWidth, bottomLeft.y + squareLeftHeight };
    double[] newTopRight = { topRight.x + squareTopWidth, topRight.y - squareRightHeight };
    double[] newBottomRight = { bottomRight.x + squareBottomWidth, bottomRight.y + squareRightHeight };

    // Populate the result
    result.put(0, 0, newTopLeft);
    result.put(1, 0, newTopRight);
    result.put(2, 0, newBottomRight);
    result.put(3, 0, newBottomLeft);

    // Calculate the new heights (we don't need the widths, but the code is left here commented out)
    // double newTopWidth = Math.sqrt(Math.pow(newTopRight[0] - newTopLeft[0], 2)
    //         + Math.pow(newTopRight[1] - newTopLeft[1], 2));
    // double newBottomWidth = Math.sqrt(Math.pow(newBottomRight[0] - newBottomLeft[0], 2)
    //         + Math.pow(newBottomRight[1] - newBottomLeft[1], 2));
    double newLeftHeight = Math.sqrt(
            Math.pow(newBottomLeft[0] - newTopLeft[0], 2) + Math.pow(newBottomLeft[1] - newTopLeft[1], 2));
    double newRightHeight = Math.sqrt(
            Math.pow(newBottomRight[0] - newTopRight[0], 2) + Math.pow(newBottomRight[1] - newTopRight[1], 2));

    // The minimum dimension always comes from the height because the pattern is shorter on that side.
    // Technically it is possible that the pattern is super stretched out, but in that case we're
    // better off failing.
    minimumDimension = newLeftHeight < newRightHeight ? newLeftHeight : newRightHeight;

    return result;
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private MatOfPoint2f rotateRect(Mat rotMat, MatOfPoint2f boardRect) {
    final MatOfPoint2f result = new MatOfPoint2f();
    result.alloc(4);

    for (int i = 0; i < 4; i++) {
        final Point rPoint = rotPoint(rotMat, new Point(boardRect.get(i, 0)[0], boardRect.get(i, 0)[1]));
        final double[] rPointD = new double[2];
        rPointD[0] = rPoint.x;
        rPointD[1] = rPoint.y;
        result.put(i, 0, rPointD);
    }

    return result;
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private Optional<MatOfPoint2f> findIdealCorners(final Mat frame, final MatOfPoint2f estimatedPatternRect) {
    Mat traceMat = null;
    if (logger.isTraceEnabled()) {
        Mat traceMatTemp = frame.clone();
        traceMat = new Mat();
        Imgproc.cvtColor(traceMatTemp, traceMat, Imgproc.COLOR_GRAY2BGR);
    }

    // pixel distance, dynamic because we want to allow any resolution or distance from pattern
    final int toleranceThreshold = (int) (minimumDimension / (double) (PATTERN_HEIGHT - 1) / 1.5);

    // Grey scale conversion.
    //final Mat grey = new Mat();
    //Imgproc.cvtColor(frame, grey, Imgproc.COLOR_BGR2GRAY);
    final Mat grey = frame;

    // Find edges
    Imgproc.Canny(grey, grey, CANNY_THRESHOLD_1, CANNY_THRESHOLD_2);
    // Blur the lines, otherwise the lines algorithm does not consider them
    Imgproc.GaussianBlur(grey, grey, gaussianBlurSize, GAUSSIANBLUR_SIGMA);

    if (logger.isTraceEnabled()) {
        logger.trace("tolerance threshold {} minimumDimension {}", toleranceThreshold, minimumDimension);

        String filename = String.format("calibrate-undist-grey-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, grey);
    }

    if (logger.isDebugEnabled())
        logger.debug("estimation {} {} {} {}", estimatedPatternRect.get(0, 0), estimatedPatternRect.get(1, 0),
                estimatedPatternRect.get(2, 0), estimatedPatternRect.get(3, 0));

    // Easier to work off of Points
    final Point[] estimatedPoints = matOfPoint2fToPoints(estimatedPatternRect);

    if (logger.isTraceEnabled()) {
        Core.circle(traceMat, estimatedPoints[0], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[1], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[2], 1, new Scalar(0, 0, 255), -1);
        Core.circle(traceMat, estimatedPoints[3], 1, new Scalar(0, 0, 255), -1);
    }

    // Find lines
    // These parameters are just guesswork right now
    final Mat mLines = new Mat();
    final int minLineSize = (int) (minimumDimension * .90);
    final int lineGap = toleranceThreshold;

    // Do it
    Imgproc.HoughLinesP(grey, mLines, HOUGHLINES_RHO, HOUGHLINES_THETA, HOUGHLINES_THRESHOLD, minLineSize,
            lineGap);

    // Find the lines that match our estimates
    final Set<double[]> verifiedLines = new HashSet<double[]>();

    for (int x = 0; x < mLines.cols(); x++) {
        final double[] vec = mLines.get(0, x);
        final double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        final Point start = new Point(x1, y1);
        final Point end = new Point(x2, y2);

        if (nearPoints(estimatedPoints, start, toleranceThreshold)
                && nearPoints(estimatedPoints, end, toleranceThreshold)) {
            verifiedLines.add(vec);

            if (logger.isTraceEnabled()) {
                Core.line(traceMat, start, end, new Scalar(255, 0, 0), 1);
            }
        }
    }

    if (logger.isTraceEnabled())
        logger.trace("verifiedLines: {}", verifiedLines.size());

    // Reduce the lines to possible corners
    final Set<Point> possibleCorners = new HashSet<Point>();

    for (double[] line1 : verifiedLines) {
        for (double[] line2 : verifiedLines) {
            if (line1 == line2)
                continue;

            Optional<Point> intersection = computeIntersect(line1, line2);

            if (intersection.isPresent())
                possibleCorners.add(intersection.get());
        }
    }

    // Reduce the possible corners to ideal corners
    Point[] idealCorners = new Point[4];
    final double[] idealDistances = { toleranceThreshold, toleranceThreshold, toleranceThreshold,
            toleranceThreshold };

    for (Point pt : possibleCorners) {
        for (int i = 0; i < 4; i++) {
            final double distance = euclideanDistance(pt, estimatedPoints[i]);

            if (distance < idealDistances[i]) {
                idealDistances[i] = distance;
                idealCorners[i] = pt;
            }
        }
    }

    if (logger.isTraceEnabled()) {
        logger.trace("idealDistances {} {} {} {}", idealDistances[0], idealDistances[1], idealDistances[2],
                idealDistances[3]);

        String filename = String.format("calibrate-lines.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Verify that we have the corners we need
    for (Point pt : idealCorners) {
        if (pt == null)
            return Optional.empty();

        if (logger.isTraceEnabled()) {
            logger.trace("idealCorners {}", pt);
            Core.circle(traceMat, pt, 1, new Scalar(0, 255, 255), -1);
        }
    }

    if (logger.isTraceEnabled()) {
        String filename = String.format("calibrate-lines-with-corners.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, traceMat);
    }

    // Sort them into the correct order
    // 1st-------2nd
    // |           |
    // |           |
    // |           |
    // 3rd-------4th
    idealCorners = sortCorners(idealCorners);

    // Build the MatOfPoint2f
    final MatOfPoint2f sourceCorners = new MatOfPoint2f();
    sourceCorners.alloc(4);

    for (int i = 0; i < 4; i++) {
        sourceCorners.put(i, 0, new double[] { idealCorners[i].x, idealCorners[i].y });
    }

    return Optional.of(sourceCorners);
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private void initializeWarpPerspective(final Mat frame, final MatOfPoint2f sourceCorners) {
    final MatOfPoint2f destCorners = new MatOfPoint2f();
    destCorners.alloc(4);

    // 1st-------2nd
    // |           |
    // |           |
    // |           |
    // 3rd-------4th
    destCorners.put(0, 0, new double[] { boundsRect.boundingRect().x, boundsRect.boundingRect().y });
    destCorners.put(1, 0, new double[] { boundsRect.boundingRect().x + boundsRect.boundingRect().width,
            boundsRect.boundingRect().y });
    destCorners.put(2, 0, new double[] { boundsRect.boundingRect().x,
            boundsRect.boundingRect().y + boundsRect.boundingRect().height });
    destCorners.put(3, 0, new double[] { boundsRect.boundingRect().x + boundsRect.boundingRect().width,
            boundsRect.boundingRect().y + boundsRect.boundingRect().height });

    if (logger.isDebugEnabled()) {
        logger.debug("initializeWarpPerspective {} {} {} {}", sourceCorners.get(0, 0), sourceCorners.get(1, 0),
                sourceCorners.get(2, 0), sourceCorners.get(3, 0));
        logger.debug("initializeWarpPerspective {} {} {} {}", destCorners.get(0, 0), destCorners.get(1, 0),
                destCorners.get(2, 0), destCorners.get(3, 0));
    }

    perspMat = Imgproc.getPerspectiveTransform(sourceCorners, destCorners);

    int width = boundsRect.boundingRect().width;
    int height = boundsRect.boundingRect().height;

    // Make them divisible by two for video recording purposes
    if ((width & 1) == 1)
        width++;
    if ((height & 1) == 1)
        height++;

    boundingBox = new BoundingBox(boundsRect.boundingRect().x, boundsRect.boundingRect().y, width, height);

    warpInitialized = true;

    if (logger.isTraceEnabled()) {
        Mat debugFrame = frame.clone();

        Core.circle(debugFrame, new Point(sourceCorners.get(0, 0)[0], sourceCorners.get(0, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(1, 0)[0], sourceCorners.get(1, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(2, 0)[0], sourceCorners.get(2, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);
        Core.circle(debugFrame, new Point(sourceCorners.get(3, 0)[0], sourceCorners.get(3, 0)[1]), 1,
                new Scalar(255, 0, 255), -1);

        Core.circle(debugFrame, new Point(destCorners.get(0, 0)[0], destCorners.get(0, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(1, 0)[0], destCorners.get(1, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(2, 0)[0], destCorners.get(2, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);
        Core.circle(debugFrame, new Point(destCorners.get(3, 0)[0], destCorners.get(3, 0)[1]), 1,
                new Scalar(255, 0, 0), -1);

        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMinX(), boundingBox.getMinY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMaxX(), boundingBox.getMinY()), new Scalar(0, 255, 0));
        Core.line(debugFrame, new Point(boundingBox.getMaxX(), boundingBox.getMaxY()),
                new Point(boundingBox.getMinX(), boundingBox.getMaxY()), new Scalar(0, 255, 0));

        String filename = String.format("calibrate-transformation.png");
        File file = new File(filename);
        filename = file.toString();
        Highgui.imwrite(filename, debugFrame);
    }
}
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
public Optional<MatOfPoint2f> findChessboard(Mat mat) {
    //Mat grayImage = new Mat();
    //Imgproc.cvtColor(mat, grayImage, Imgproc.COLOR_BGR2GRAY);
    Mat grayImage = mat;

    MatOfPoint2f imageCorners = new MatOfPoint2f();

    boolean found = Calib3d.findChessboardCorners(grayImage, boardSize, imageCorners,
            Calib3d.CALIB_CB_NORMALIZE_IMAGE + Calib3d.CALIB_CB_ADAPTIVE_THRESH);

    logger.trace("found {}", found);

    if (found) {
        // optimization
        Imgproc.cornerSubPix(grayImage, imageCorners, new Size(1, 1), new Size(-1, -1), term);
        return Optional.of(imageCorners);
    }
    return Optional.empty();
}
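When corners are found, they can be visualized with Calib3d.drawChessboardCorners, which also takes a MatOfPoint2f. A short hedged sketch; drawing onto a clone of the input and the helper signature are my choices, not part of the project above:

// Assumes: import org.opencv.calib3d.Calib3d; import org.opencv.core.*;
static Mat drawDetectedCorners(Mat image, Size boardSize, MatOfPoint2f corners) {
    Mat annotated = image.clone();
    // Overlays the detected inner-corner grid; the boolean flags a complete detection
    Calib3d.drawChessboardCorners(annotated, boardSize, corners, true);
    return annotated;
}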
From source file:com.shootoff.camera.autocalibration.AutoCalibrationManager.java
License:Open Source License
private MatOfPoint2f calcBoardRectFromCorners(MatOfPoint2f corners) {
    MatOfPoint2f result = new MatOfPoint2f();
    result.alloc(4);

    Point topLeft = new Point(corners.get(0, 0)[0], corners.get(0, 0)[1]);
    Point topRight = new Point(corners.get(PATTERN_WIDTH - 1, 0)[0], corners.get(PATTERN_WIDTH - 1, 0)[1]);
    Point bottomRight = new Point(corners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[0],
            corners.get(PATTERN_WIDTH * PATTERN_HEIGHT - 1, 0)[1]);
    Point bottomLeft = new Point(corners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[0],
            corners.get(PATTERN_WIDTH * (PATTERN_HEIGHT - 1), 0)[1]);

    result.put(0, 0, topLeft.x, topLeft.y, topRight.x, topRight.y, bottomRight.x, bottomRight.y, bottomLeft.x,
            bottomLeft.y);

    return result;
}