List of usage examples for org.opencv.core.MatOfPoint2f.toArray()
public Point[] toArray()
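The examples below, collected from open-source projects, all call toArray() to copy the rows of a MatOfPoint2f back into a plain Java Point[] so the points can be iterated, filtered, or passed to other OpenCV calls. As a minimal standalone sketch (not taken from any of the projects below; it assumes the OpenCV native library has already been loaded elsewhere):

import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class ToArrayExample {
    public static void main(String[] args) {
        // NOTE: assumes System.loadLibrary(Core.NATIVE_LIBRARY_NAME) has already run.
        MatOfPoint2f quad = new MatOfPoint2f(
                new Point(0, 0), new Point(100, 0),
                new Point(100, 50), new Point(0, 50));

        // toArray() copies the matrix rows back into a plain Point[]
        Point[] pts = quad.toArray();
        for (Point p : pts) {
            System.out.println(p);
        }
    }
}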
From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java
License:Open Source License
/**
 * Creates a new possible target based on the specified blob and calculates
 * its score.
 *
 * @param contour the shape of the possible target
 * @param grayImage the grayscale source image, used for sub-pixel corner refinement
 */
public Target(MatOfPoint contour, Mat grayImage) {
    // Simplify contour to make the corner finding algorithm work better
    MatOfPoint2f fContour = new MatOfPoint2f();
    contour.convertTo(fContour, CvType.CV_32F);
    Imgproc.approxPolyDP(fContour, fContour, VisionParameters.getGoalApproxPolyEpsilon(), true);
    fContour.convertTo(contour, CvType.CV_32S);
    this.contour = contour;

    // Check area, and don't do any calculations if it is not valid
    if (validArea = validateArea()) {
        // Find a bounding rectangle
        RotatedRect rect = Imgproc.minAreaRect(fContour);

        Point[] rectPoints = new Point[4];
        rect.points(rectPoints);
        for (int j = 0; j < rectPoints.length; j++) {
            Point rectPoint = rectPoints[j];

            double minDistance = Double.MAX_VALUE;
            Point point = null;

            for (int i = 0; i < contour.rows(); i++) {
                Point contourPoint = new Point(contour.get(i, 0));
                double dist = distance(rectPoint, contourPoint);
                if (dist < minDistance) {
                    minDistance = dist;
                    point = contourPoint;
                }
            }

            rectPoints[j] = point;
        }
        MatOfPoint2f rectMat = new MatOfPoint2f(rectPoints);

        // Refine the corners to improve accuracy
        Imgproc.cornerSubPix(grayImage, rectMat, new Size(4, 10), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        rectPoints = rectMat.toArray();

        // Identify each corner
        SortedMap<Double, List<Point>> x = new TreeMap<>();
        Arrays.stream(rectPoints).forEach((p) -> {
            List<Point> points;
            if ((points = x.get(p.x)) == null) {
                x.put(p.x, points = new LinkedList<>());
            }
            points.add(p);
        });

        int i = 0;
        for (Iterator<List<Point>> it = x.values().iterator(); it.hasNext();) {
            List<Point> s = it.next();
            for (Point p : s) {
                switch (i) {
                case 0:
                    topLeft = p;
                    break;
                case 1:
                    bottomLeft = p;
                    break;
                case 2:
                    topRight = p;
                    break;
                case 3:
                    bottomRight = p;
                }
                i++;
            }
        }

        // Organize corners
        if (topLeft.y > bottomLeft.y) {
            Point p = bottomLeft;
            bottomLeft = topLeft;
            topLeft = p;
        }
        if (topRight.y > bottomRight.y) {
            Point p = bottomRight;
            bottomRight = topRight;
            topRight = p;
        }

        // Create corners for centroid calculation
        corners = new MatOfPoint2f(rectPoints);

        // Calculate center
        Moments moments = Imgproc.moments(corners);
        center = new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);

        // Put the points in the correct order for solvePNP
        rectPoints[0] = topLeft;
        rectPoints[1] = topRight;
        rectPoints[2] = bottomLeft;
        rectPoints[3] = bottomRight;

        // Recreate corners in the new order
        corners = new MatOfPoint2f(rectPoints);

        widthTop = distance(topLeft, topRight);
        widthBottom = distance(bottomLeft, bottomRight);
        width = (widthTop + widthBottom) / 2.0;

        heightLeft = distance(topLeft, bottomLeft);
        heightRight = distance(topRight, bottomRight);
        height = (heightLeft + heightRight) / 2.0;

        Mat tvec = new Mat();
        // Calculate target's location
        Calib3d.solvePnP(OBJECT_POINTS, corners, CAMERA_MAT, DISTORTION_MAT, rotation, tvec, false,
                Calib3d.CV_P3P);

        // =======================================
        // Position and Orientation Transformation
        // =======================================

        double armAngle = VisionResults.getArmAngle();

        // Flip y axis to point upward
        Core.multiply(tvec, SIGN_NORMALIZATION_MATRIX, tvec);

        // Shift origin to arm pivot point, on the robot's centerline
        CoordinateMath.translate(tvec, CAMERA_X_OFFSET, CAMERA_Y_OFFSET, ARM_LENGTH);

        // Align axes with ground
        CoordinateMath.rotateX(tvec, -armAngle);
        Core.add(rotation, new MatOfDouble(armAngle, 0, 0), rotation);

        // Shift origin to robot center of rotation
        CoordinateMath.translate(tvec, 0, ARM_PIVOT_Y_OFFSET, -ARM_PIVOT_Z_OFFSET);

        double xPosFeet = tvec.get(0, 0)[0];
        double yPosFeet = tvec.get(1, 0)[0];
        double zPosFeet = tvec.get(2, 0)[0];

        // Old, less effective aiming heading and distance calculation:
        // double pixelsToFeet = TARGET_WIDTH / width;
        // distance = (TARGET_WIDTH * HighGoalProcessor.IMAGE_SIZE.width
        //         / (2 * width * Math.tan(VisionParameters.getFOVAngle() / 2)));
        // double xPosFeet = (center.x - (HighGoalProcessor.IMAGE_SIZE.width / 2)) * pixelsToFeet;
        // double yPosFeet = -(center.y - (HighGoalProcessor.IMAGE_SIZE.height / 2)) * pixelsToFeet;

        distance = Math.sqrt(xPosFeet * xPosFeet + zPosFeet * zPosFeet);
        position = new Point3(xPosFeet, yPosFeet, zPosFeet);

        xGoalAngle = Math.atan(xPosFeet / zPosFeet);
        yGoalAngle = Math.atan(yPosFeet / zPosFeet);

        validate();
        score = calculateScore();
    } else {
        valid = false;
    }
}
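The core toArray() pattern in this example is: build a MatOfPoint2f from candidate corner points, refine it in place with cornerSubPix, then read the refined corners back as a Point[]. A condensed sketch of just that step (the window size and termination criteria below are illustrative placeholders, not the project's tuned values):

import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;

public class CornerRefineSketch {
    /** Refines rough corner estimates on a grayscale image and returns them as a Point[]. */
    public static Point[] refine(Mat gray, Point[] roughCorners) {
        MatOfPoint2f corners = new MatOfPoint2f(roughCorners);
        // cornerSubPix updates the point matrix in place
        Imgproc.cornerSubPix(gray, corners, new Size(5, 5), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        // toArray() reads the refined coordinates back as a plain array
        return corners.toArray();
    }
}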
From source file:simeav.filtros.instanciaciones.DetectorModulosEstandar.java
@Override
public Mat detectarModulos(Mat original, Diagrama diagrama) {
    Imgproc.blur(original, original, new Size(15, 15));
    original = Utils.dilate(original);
    Mat jerarquia = new Mat();
    ArrayList<MatOfPoint> contornos = new ArrayList<>();
    Imgproc.findContours(original.clone(), contornos, jerarquia, Imgproc.RETR_CCOMP,
            Imgproc.CHAIN_APPROX_SIMPLE);
    ArrayList<MatOfPoint> cp = new ArrayList<>(contornos.size());
    Map<Integer, Rect> rectangulos = new HashMap<>();
    Integer id_cuadrado = 0;
    Mat resultado = Mat.zeros(original.size(), CvType.CV_8U);
    for (int i = contornos.size() - 1; i >= 0; i--) {
        if (jerarquia.get(0, i)[3] > -1) {
            MatOfPoint2f contorno2f = new MatOfPoint2f();
            contorno2f.fromList(contornos.get(i).toList());
            MatOfPoint2f c = new MatOfPoint2f();
            Imgproc.approxPolyDP(contorno2f, c, 3, true);
            cp.add(new MatOfPoint(c.toArray()));
            int lados = cp.get(cp.size() - 1).height();
            if ((4 <= lados) && lados < 12) {
                rectangulos.put(id_cuadrado, Imgproc.boundingRect(new MatOfPoint(c.toArray())));
                Point tl = new Point(rectangulos.get(id_cuadrado).tl().x - 20,
                        rectangulos.get(id_cuadrado).tl().y - 20);
                Point br = new Point(rectangulos.get(id_cuadrado).br().x + 20,
                        rectangulos.get(id_cuadrado).br().y + 20);
                Core.rectangle(resultado, tl, br, new Scalar(255, 255, 255), -1);
                diagrama.addModulo(id_cuadrado, new Rect(tl, br));
                Imgproc.drawContours(resultado, contornos, i, new Scalar(0, 0, 0), -1);
                id_cuadrado++;
            }
        }
    }
    return resultado;
}
From source file:syncleus.dann.data.video.LKTracker.java
License:Apache License
/**
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 */
public Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    // Forward-Backward tracking
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat,
            statusMat, errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat, statusFBMat,
            errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    float[] errSimilarity = new float[lastPoints.length];
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // Compute the real FB error (relative to LAST points, not the current ones)
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = TLDUtil.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // Compute real similarity error
    errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    // TODO errSimilarityFB has problem != from C++
    // Filter out points with fwd-back error > the median AND points with similarity error > median
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}
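Here toArray() converts the pyramidal Lucas-Kanade output matrices (tracked points, status bytes, error floats) into plain arrays for the forward-backward error check. A minimal standalone sketch of that reading-back step, using the short calcOpticalFlowPyrLK overload rather than the project's tuned window, level, and lambda settings:

import org.opencv.core.*;
import org.opencv.video.Video;

public class FlowSketch {
    /** Tracks the given points from prevGray to nextGray and returns their new positions. */
    public static Point[] trackPoints(Mat prevGray, Mat nextGray, Point[] prevPts) {
        MatOfPoint2f prevMat = new MatOfPoint2f(prevPts);
        MatOfPoint2f nextMat = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();
        MatOfFloat err = new MatOfFloat();
        Video.calcOpticalFlowPyrLK(prevGray, nextGray, prevMat, nextMat, status, err);
        // toArray() on the output matrices yields plain arrays to filter or draw
        Point[] nextPts = nextMat.toArray();
        byte[] ok = status.toArray();
        // Callers typically keep only the points where ok[i] == 1
        return nextPts;
    }
}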
From source file:tv.danmaku.ijk.media.example.activities.VideoActivity.java
License:Apache License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    // return mRgba;
    // iThreshold = 10000;

    //Imgproc.blur(mRgba, mRgba, new Size(5,5));
    Imgproc.GaussianBlur(mRgba, mRgba, new org.opencv.core.Size(3, 3), 1, 1);
    //Imgproc.medianBlur(mRgba, mRgba, 3);

    if (!mIsColorSelected)
        return mRgba;

    List<MatOfPoint> contours = mDetector.getContours();
    mDetector.process(mRgba);

    Log.d(TAG, "Contours count: " + contours.size());

    if (contours.size() <= 0) {
        return mRgba;
    }

    RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(0).toArray()));

    double boundWidth = rect.size.width;
    double boundHeight = rect.size.height;
    int boundPos = 0;

    for (int i = 1; i < contours.size(); i++) {
        rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
        if (rect.size.width * rect.size.height > boundWidth * boundHeight) {
            boundWidth = rect.size.width;
            boundHeight = rect.size.height;
            boundPos = i;
        }
    }

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contours.get(boundPos).toArray()));
    Imgproc.rectangle(mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

    Log.d(TAG, " Row start [" + (int) boundRect.tl().y + "] row end [" + (int) boundRect.br().y
            + "] Col start [" + (int) boundRect.tl().x + "] Col end [" + (int) boundRect.br().x + "]");

    int rectHeightThresh = 0;
    double a = boundRect.br().y - boundRect.tl().y;
    a = a * 0.7;
    a = boundRect.tl().y + a;

    Log.d(TAG, " A [" + a + "] br y - tl y = [" + (boundRect.br().y - boundRect.tl().y) + "]");

    //Core.rectangle( mRgba, boundRect.tl(), boundRect.br(), CONTOUR_COLOR, 2, 8, 0 );
    Imgproc.rectangle(mRgba, boundRect.tl(), new Point(boundRect.br().x, a), CONTOUR_COLOR, 2, 8, 0);

    MatOfPoint2f pointMat = new MatOfPoint2f();
    Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(boundPos).toArray()), pointMat, 3, true);
    contours.set(boundPos, new MatOfPoint(pointMat.toArray()));

    MatOfInt hull = new MatOfInt();
    MatOfInt4 convexDefect = new MatOfInt4();
    Imgproc.convexHull(new MatOfPoint(contours.get(boundPos).toArray()), hull);

    if (hull.toArray().length < 3)
        return mRgba;

    Imgproc.convexityDefects(new MatOfPoint(contours.get(boundPos).toArray()), hull, convexDefect);

    List<MatOfPoint> hullPoints = new LinkedList<MatOfPoint>();
    List<Point> listPo = new LinkedList<Point>();
    for (int j = 0; j < hull.toList().size(); j++) {
        listPo.add(contours.get(boundPos).toList().get(hull.toList().get(j)));
    }

    MatOfPoint e = new MatOfPoint();
    e.fromList(listPo);
    hullPoints.add(e);

    List<MatOfPoint> defectPoints = new LinkedList<MatOfPoint>();
    List<Point> listPoDefect = new LinkedList<Point>();
    for (int j = 0; j < convexDefect.toList().size(); j = j + 4) {
        Point farPoint = contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2));
        Integer depth = convexDefect.toList().get(j + 3);
        if (depth > iThreshold && farPoint.y < a) {
            listPoDefect.add(contours.get(boundPos).toList().get(convexDefect.toList().get(j + 2)));
        }
        Log.d(TAG, "defects [" + j + "] " + convexDefect.toList().get(j + 3));
    }

    MatOfPoint e2 = new MatOfPoint();
    e2.fromList(listPo);
    defectPoints.add(e2);

    Log.d(TAG, "hull: " + hull.toList());
    Log.d(TAG, "defects: " + convexDefect.toList());

    Imgproc.drawContours(mRgba, hullPoints, -1, CONTOUR_COLOR, 3);

    int defectsTotal = (int) convexDefect.total();
    Log.d(TAG, "Defect total " + defectsTotal);

    this.numberOfFingers = listPoDefect.size();
    if (this.numberOfFingers > 5)
        this.numberOfFingers = 5;

    mHandler.post(mUpdateFingerCountResults);

    for (Point p : listPoDefect) {
        Imgproc.circle(mRgba, p, 6, new Scalar(255, 0, 255));
    }

    return mRgba;
}
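The largest-contour selection at the top of this example, minAreaRect over new MatOfPoint2f(contour.toArray()), can be isolated into a small helper. A sketch of that step alone, under the assumption that the contour list is non-empty:

import java.util.List;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.RotatedRect;
import org.opencv.imgproc.Imgproc;

public class LargestContourSketch {
    /** Returns the index of the contour with the largest rotated bounding box (list must be non-empty). */
    public static int largestContourIndex(List<MatOfPoint> contours) {
        int best = 0;
        double bestArea = -1;
        for (int i = 0; i < contours.size(); i++) {
            // minAreaRect needs float points, so convert the contour via toArray()
            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
            double area = rect.size.width * rect.size.height;
            if (area > bestArea) {
                bestArea = area;
                best = i;
            }
        }
        return best;
    }
}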
From source file:video.PictureAnalyser.java
public List<MatOfPoint> getConturs(Scalar low, Scalar high, Mat img) {
    Mat imgThresholded = new Mat();
    Mat imgThresholded2 = new Mat();
    Core.inRange(img, low, high, imgThresholded);
    if (low.val[0] < 0) {
        low.val[0] = 180 + low.val[0];
        high.val[0] = 179;
        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);
    }
    if (high.val[0] > 179) {
        low.val[0] = 0;
        high.val[0] = high.val[0] - 180;
        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);
    }

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    int dilation_size = 3;
    Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
            new Size(2 * dilation_size + 1, 2 * dilation_size + 1));
    Imgproc.dilate(imgThresholded, imgThresholded, element1);
    Imgproc.findContours(imgThresholded, contours, new Mat(), Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_SIMPLE);

    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f contour2f = new MatOfPoint2f(contours.get(i).toArray());
        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());
        Rect rect = Imgproc.boundingRect(points);
        int area = (rect.width) * (rect.height);
        // Check whether the area is too small
        if (area > 500) {
        } else {
            contours.remove(i);
            i--;
        }
    }
    return contours;
}
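The approxPolyDP / toArray() / boundingRect chain above is a common way to discard small contours. A hedged, standalone sketch of just that chain (the 2% arc-length epsilon mirrors the value above; the minArea parameter and helper name are illustrative):

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.*;
import org.opencv.imgproc.Imgproc;

public class ContourFilterSketch {
    /** Keeps only contours whose simplified bounding box covers more than minArea pixels. */
    public static List<MatOfPoint> filterByArea(List<MatOfPoint> contours, int minArea) {
        List<MatOfPoint> kept = new ArrayList<>();
        MatOfPoint2f approx = new MatOfPoint2f();
        for (MatOfPoint contour : contours) {
            MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray());
            double epsilon = Imgproc.arcLength(contour2f, true) * 0.02;
            Imgproc.approxPolyDP(contour2f, approx, epsilon, true);
            // Convert the float curve back to integer points for boundingRect
            Rect box = Imgproc.boundingRect(new MatOfPoint(approx.toArray()));
            if (box.width * box.height > minArea) {
                kept.add(contour);
            }
        }
        return kept;
    }
}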