List of usage examples for org.opencv.core Mat get
public double[] get(int row, int col)
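Before the project-specific examples, here is a minimal standalone sketch (not taken from any of the sources below; the class name MatGetDemo is just for illustration) showing what get(row, col) returns: one double per channel for the pixel at that row and column. It assumes the OpenCV native library is available on the java.library.path.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatGetDemo {
    public static void main(String[] args) {
        // Load the OpenCV native library; the library name/path is environment-specific
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A 3x4 three-channel 8-bit Mat filled with a constant value per channel
        Mat mat = new Mat(3, 4, CvType.CV_8UC3, new Scalar(10, 20, 30));

        // get(row, col) returns one double per channel for the pixel at (row, col)
        double[] pixel = mat.get(1, 2);
        System.out.println("channels = " + mat.channels());               // 3
        System.out.println(pixel[0] + ", " + pixel[1] + ", " + pixel[2]); // 10.0, 20.0, 30.0
    }
}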
From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java
License:Open Source License
public void executePreprocessor() {
    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);
    Mat mat = originalMat.createGrayImage();
    showImageResult(mat, "Original image");

    // Count very dark and very bright pixels by reading each gray value with Mat.get()
    long blackCount = 0;
    long whiteCount = 0;
    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }

    int index = 0;
    for (int i = 0; i < mat.rows(); i += 16) {
        for (int j = 0; j < mat.cols(); j += 8) {
            Rect rect = new Rect(new Point(i, j), new Size(8, 16));
            index++;
            System.out.println(rect.toString());
        }
    }
    System.out.println(index);

    ThresholdMat thresholdMat = new ThresholdMat(mat, 0, 255, Imgproc.THRESH_OTSU);
    Mat mat1 = thresholdMat.createMatResult();
    if (blackCount > whiteCount) {
        showImageResult(mat1, "Thresholded image");
        CloseMat openMat = new CloseMat(mat1, Imgproc.MORPH_RECT, 5, 5, 1);
        Mat mat2 = openMat.createMatResult();
        showImageResult(mat2, "Opening result");
    } else {
    }
}
From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java
License:Open Source License
public void licensePlateDetection() {
    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1: convert to grayscale and count very dark / very bright pixels via Mat.get()
    Mat mat = originalMat.createGrayImage();
    long blackCount = 0;
    long whiteCount = 0;
    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }
    System.out.println("whiteCount: " + whiteCount);
    System.out.println("blackCount: " + blackCount);
    // ImageResult imageResult1 = new ImageResult(mat, "GrayImage1");
    // imageResult1.showResultImage();

    // Step 2: bilateral filtering
    BilateralFilteringMat bilateralFilteringMat = new BilateralFilteringMat(mat, 75, 75, 1);
    Mat mat2 = bilateralFilteringMat.createMatResult();
    // ImageResult imageResult2 = new ImageResult(mat2, "Gauss");
    // imageResult2.showResultImage();

    // Step 3: histogram equalization
    HistogramEqualizationMat histogramEqualizationMat = new HistogramEqualizationMat(mat2);
    Mat mat3 = histogramEqualizationMat.createMatResult();
    // ImageResult imageResult3 = new ImageResult(mat3, "HistogramEqualizationMat");
    // imageResult3.showResultImage();

    // Step 4: morphological opening
    OpenMat openMat = new OpenMat(mat3, Imgproc.MORPH_RECT, 5, 5, 2.2);
    Mat mat4 = openMat.createMatResult();
    // ImageResult imageResult4 = new ImageResult(mat4, "OpenMat");
    // imageResult4.showResultImage();

    // Step 5: subtract the opened image from the equalized image
    SubtractMat subtractMat = new SubtractMat(mat4, mat3);
    Mat mat5 = subtractMat.createMatResult();
    // ImageResult imageResult5 = new ImageResult(mat5, "SubtractMat");
    // imageResult5.showResultImage();

    // Step 6: Otsu thresholding
    ThresholdMat thresholdMat = new ThresholdMat(mat5, 0, 255, Imgproc.THRESH_OTSU);
    // Mat mat6 = thresholdMat.createMatResult();
    // ImageResult imageResult6 = new ImageResult(mat6, "THRESH_OTSU");
    // imageResult6.showResultImage();

    // Step 7: Canny edge detection
    // CannyMat cannyMat = new CannyMat(mat6, 250, 255);
    // Mat mat7 = cannyMat.createMatResult();
    // ImageResult imageResult7 = new ImageResult(mat7, "GrayImage7");
    // imageResult7.showResultImage();

    // Step 8: dilation
    // MorphologyMatBase morphologyMatBase = new MorphologyMatBase(mat7, Imgproc.MORPH_RECT, 3, 3, 1);
    // Mat mat8 = morphologyMatBase.dilate();
    // ImageResult imageResult8 = new ImageResult(mat8, "GrayImage8");
    // imageResult8.showResultImage();

    // Step 9: rectangle detection
    // RectangleDetection rect = new RectangleDetection(mat8);
    // ImageResult imageResult = new ImageResult(rect.executeRectangleDetection(), "GrayImage9");
    // imageResult.showResultImage();
}
From source file:org.lasarobotics.vision.util.color.Color.java
License:Open Source License
/**
 * Convert this color to a different colorspace and return a scalar
 *
 * @param to Colorspace to convert to
 * @return Scalar in other colorspace
 */
public Scalar convertColorScalar(ColorSpace to) {
    if (getColorSpace() == to)
        return getScalar();
    if (!getColorSpace().canConvertTo(to))
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");

    Scalar output = this.getScalar();
    try {
        for (int i = 0; i < getColorSpace().getConversionsTo(to).length; i += 3) {
            int conversion = getColorSpace().getConversionsTo(to)[i];
            int inputDim = getColorSpace().getConversionsTo(to)[i + 1];
            int outputDim = getColorSpace().getConversionsTo(to)[i + 2];

            Mat pointMatTo = new Mat();
            Mat pointMatFrom = new Mat(1, 1, CvType.CV_8UC(inputDim), output);
            Imgproc.cvtColor(pointMatFrom, pointMatTo, conversion, outputDim);

            output = new Scalar(pointMatTo.get(0, 0));
            pointMatTo.release();
            pointMatFrom.release();
        }
    } catch (Exception ignored) {
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");
    }

    return output;
}
From source file:org.openpnp.machine.reference.ReferenceCamera.java
License:Open Source License
private Mat rotate(Mat mat, double rotation) {
    if (rotation == 0D) {
        return mat;
    }

    // See:
    // http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
    Point center = new Point(mat.width() / 2D, mat.height() / 2D);
    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, rotation, 1.0);

    // determine bounding rectangle
    Rect bbox = new RotatedRect(center, mat.size(), rotation).boundingRect();

    // adjust transformation matrix
    double[] cx = mapMatrix.get(0, 2);
    double[] cy = mapMatrix.get(1, 2);
    cx[0] += bbox.width / 2D - center.x;
    cy[0] += bbox.height / 2D - center.y;
    mapMatrix.put(0, 2, cx);
    mapMatrix.put(1, 2, cy);

    Mat dst = new Mat(bbox.width, bbox.height, mat.type());
    Imgproc.warpAffine(mat, dst, mapMatrix, bbox.size(), Imgproc.INTER_LINEAR);

    mat.release();
    mapMatrix.release();

    return dst;
}
From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java
License:Open Source License
/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain
 * a Location in Camera coordinates. The results are sorted best score
 * to worst score.
 *
 * @param template
 * @return
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2),
                y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}
From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java
License:Open Source License
static List<Point> matMaxima(Mat mat, double rangeMin, double rangeMax) {
    List<Point> locations = new ArrayList<Point>();

    int rEnd = mat.rows() - 1;
    int cEnd = mat.cols() - 1;

    // CHECK EACH ROW MAXIMA FOR LOCAL 2D MAXIMA
    for (int r = 0; r <= rEnd; r++) {
        MinMaxState state = MinMaxState.BEFORE_INFLECTION;
        double curVal = mat.get(r, 0)[0];
        for (int c = 1; c <= cEnd; c++) {
            double val = mat.get(r, c)[0];

            if (val == curVal) {
                continue;
            } else if (curVal < val) {
                if (state == MinMaxState.BEFORE_INFLECTION) {
                    // n/a
                } else {
                    state = MinMaxState.BEFORE_INFLECTION;
                }
            } else { // curVal > val
                if (state == MinMaxState.BEFORE_INFLECTION) {
                    if (rangeMin <= curVal && curVal <= rangeMax) { // ROW MAXIMA
                        if (0 < r && (mat.get(r - 1, c - 1)[0] >= curVal
                                || mat.get(r - 1, c)[0] >= curVal)) {
                            // cout << "reject:r-1 " << r << "," << c-1 << endl;
                            // - x x
                            // - - -
                            // - - -
                        } else if (r < rEnd && (mat.get(r + 1, c - 1)[0] > curVal
                                || mat.get(r + 1, c)[0] > curVal)) {
                            // cout << "reject:r+1 " << r << "," << c-1 << endl;
                            // - - -
                            // - - -
                            // - x x
                        } else if (1 < c && (0 < r && mat.get(r - 1, c - 2)[0] >= curVal
                                || mat.get(r, c - 2)[0] > curVal
                                || r < rEnd && mat.get(r + 1, c - 2)[0] > curVal)) {
                            // cout << "reject:c-2 " << r << "," << c-1 << endl;
                            // x - -
                            // x - -
                            // x - -
                        } else {
                            locations.add(new Point(c - 1, r));
                        }
                    }
                    state = MinMaxState.AFTER_INFLECTION;
                } else {
                    // n/a
                }
            }

            curVal = val;
        }

        // PROCESS END OF ROW
        if (state == MinMaxState.BEFORE_INFLECTION) {
            if (rangeMin <= curVal && curVal <= rangeMax) { // ROW MAXIMA
                if (0 < r && (mat.get(r - 1, cEnd - 1)[0] >= curVal
                        || mat.get(r - 1, cEnd)[0] >= curVal)) {
                    // cout << "rejectEnd:r-1 " << r << "," << cEnd-1 << endl;
                    // - x x
                    // - - -
                    // - - -
                } else if (r < rEnd && (mat.get(r + 1, cEnd - 1)[0] > curVal
                        || mat.get(r + 1, cEnd)[0] > curVal)) {
                    // cout << "rejectEnd:r+1 " << r << "," << cEnd-1 << endl;
                    // - - -
                    // - - -
                    // - x x
                } else if (1 < r && mat.get(r - 1, cEnd - 2)[0] >= curVal
                        || mat.get(r, cEnd - 2)[0] > curVal
                        || r < rEnd && mat.get(r + 1, cEnd - 2)[0] > curVal) {
                    // cout << "rejectEnd:cEnd-2 " << r << "," << cEnd-1 << endl;
                    // x - -
                    // x - -
                    // x - -
                } else {
                    locations.add(new Point(cEnd, r));
                }
            }
        }
    }

    return locations;
}
From source file:org.surmon.pattern.detector.houghcircle.HoughCircleDetector.java
/**
 * {@inheritDoc}
 *
 * Runs detection of circles in image. Hough detector parameters are taken
 * from declared fields. This method detects circle particles in given image.
 */
@Override
public List<CircleParticle> detectIn(PatternImage image) {
    List<CircleParticle> circles = new ArrayList<>();

    Mat mat = image.getPixels();
    Mat rawCircles = new Mat();

    Imgproc.GaussianBlur(mat, mat, new Size(ksize, ksize), sigma);
    Imgproc.HoughCircles(mat, rawCircles, Imgproc.CV_HOUGH_GRADIENT, dp, minDist, param1, param2,
            minRadius, maxRadius);

    // creates a particle for each detected circle; each column holds (x, y, radius)
    for (int i = 0; i < rawCircles.cols(); i++) {
        double[] var = rawCircles.get(0, i);
        CircleParticle circle = new CircleParticle(i, var[0], var[1], var[2]);
        circles.add(circle);
    }

    return circles;
}
From source file:org.surmon.pattern.visualization.gl.utils.BufferUtils.java
public static final ByteBuffer createByteBuffer(List<Mat> obj, Dimension3D dim) {
    final int pixelsInBuffer = 1;
    final int width = dim.getWidth();
    final int height = dim.getHeight();
    final int depth = obj.size();
    final int volume = width * height * depth;

    ByteBuffer bb = ByteBuffer.allocate(volume * pixelsInBuffer);

    for (Mat mat : obj) {
        for (int i = 0; i < height; i++) {
            for (int j = 0; j < width; j++) {
                double[] pixel = mat.get(i, j);
                bb.put((byte) pixel[0]);
            }
        }
    }
    bb.rewind();

    return bb;
}
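Each mat.get(i, j) call in the buffer-filling loop above crosses the JNI boundary. When performance matters, the Java bindings also provide bulk overloads such as get(int row, int col, byte[] data) for 8-bit Mats. Below is a minimal sketch (the helper class BulkReadExample is hypothetical, not part of the project above) that assumes a single-channel CV_8U Mat and copies it into a Java array in one call before indexing it.

import org.opencv.core.Mat;

final class BulkReadExample {
    // Sum all pixels of a single-channel CV_8U Mat using one bulk copy
    // instead of one native call per pixel.
    static long sumGray(Mat mat) {
        byte[] data = new byte[(int) (mat.total() * mat.channels())];
        mat.get(0, 0, data); // copies the whole Mat, row-major, in a single call

        long sum = 0;
        for (int i = 0; i < mat.rows(); i++) {
            for (int j = 0; j < mat.cols(); j++) {
                // Mask with 0xFF because Java bytes are signed
                sum += data[i * mat.cols() + j] & 0xFF;
            }
        }
        return sum;
    }
}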
From source file:org.usfirst.frc.team2084.CMonster2016.vision.Target.java
License:Open Source License
/**
 * Creates a new possible target based on the specified blob and calculates
 * its score.
 *
 * @param contour the shape of the possible target
 */
public Target(MatOfPoint contour, Mat grayImage) {
    // Simplify contour to make the corner finding algorithm work better
    MatOfPoint2f fContour = new MatOfPoint2f();
    contour.convertTo(fContour, CvType.CV_32F);
    Imgproc.approxPolyDP(fContour, fContour, VisionParameters.getGoalApproxPolyEpsilon(), true);
    fContour.convertTo(contour, CvType.CV_32S);

    this.contour = contour;

    // Check area, and don't do any calculations if it is not valid
    if (validArea = validateArea()) {
        // Find a bounding rectangle
        RotatedRect rect = Imgproc.minAreaRect(fContour);

        Point[] rectPoints = new Point[4];
        rect.points(rectPoints);
        for (int j = 0; j < rectPoints.length; j++) {
            Point rectPoint = rectPoints[j];
            double minDistance = Double.MAX_VALUE;
            Point point = null;

            for (int i = 0; i < contour.rows(); i++) {
                Point contourPoint = new Point(contour.get(i, 0));
                double dist = distance(rectPoint, contourPoint);
                if (dist < minDistance) {
                    minDistance = dist;
                    point = contourPoint;
                }
            }

            rectPoints[j] = point;
        }
        MatOfPoint2f rectMat = new MatOfPoint2f(rectPoints);

        // Refine the corners to improve accuracy
        Imgproc.cornerSubPix(grayImage, rectMat, new Size(4, 10), new Size(-1, -1),
                new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
        rectPoints = rectMat.toArray();

        // Identify each corner
        SortedMap<Double, List<Point>> x = new TreeMap<>();
        Arrays.stream(rectPoints).forEach((p) -> {
            List<Point> points;
            if ((points = x.get(p.x)) == null) {
                x.put(p.x, points = new LinkedList<>());
            }
            points.add(p);
        });

        int i = 0;
        for (Iterator<List<Point>> it = x.values().iterator(); it.hasNext();) {
            List<Point> s = it.next();
            for (Point p : s) {
                switch (i) {
                case 0:
                    topLeft = p;
                    break;
                case 1:
                    bottomLeft = p;
                    break;
                case 2:
                    topRight = p;
                    break;
                case 3:
                    bottomRight = p;
                }
                i++;
            }
        }

        // Organize corners
        if (topLeft.y > bottomLeft.y) {
            Point p = bottomLeft;
            bottomLeft = topLeft;
            topLeft = p;
        }
        if (topRight.y > bottomRight.y) {
            Point p = bottomRight;
            bottomRight = topRight;
            topRight = p;
        }

        // Create corners for centroid calculation
        corners = new MatOfPoint2f(rectPoints);

        // Calculate center
        Moments moments = Imgproc.moments(corners);
        center = new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);

        // Put the points in the correct order for solvePNP
        rectPoints[0] = topLeft;
        rectPoints[1] = topRight;
        rectPoints[2] = bottomLeft;
        rectPoints[3] = bottomRight;

        // Recreate corners in the new order
        corners = new MatOfPoint2f(rectPoints);

        widthTop = distance(topLeft, topRight);
        widthBottom = distance(bottomLeft, bottomRight);
        width = (widthTop + widthBottom) / 2.0;

        heightLeft = distance(topLeft, bottomLeft);
        heightRight = distance(topRight, bottomRight);
        height = (heightLeft + heightRight) / 2.0;

        Mat tvec = new Mat();
        // Calculate target's location
        Calib3d.solvePnP(OBJECT_POINTS, corners, CAMERA_MAT, DISTORTION_MAT, rotation, tvec, false,
                Calib3d.CV_P3P);

        // =======================================
        // Position and Orientation Transformation
        // =======================================

        double armAngle = VisionResults.getArmAngle();

        // Flip y axis to point upward
        Core.multiply(tvec, SIGN_NORMALIZATION_MATRIX, tvec);

        // Shift origin to arm pivot point, on the robot's centerline
        CoordinateMath.translate(tvec, CAMERA_X_OFFSET, CAMERA_Y_OFFSET, ARM_LENGTH);

        // Align axes with ground
        CoordinateMath.rotateX(tvec, -armAngle);
        Core.add(rotation, new MatOfDouble(armAngle, 0, 0), rotation);

        // Shift origin to robot center of rotation
        CoordinateMath.translate(tvec, 0, ARM_PIVOT_Y_OFFSET, -ARM_PIVOT_Z_OFFSET);

        // Read the translation vector components back out with Mat.get()
        double xPosFeet = tvec.get(0, 0)[0];
        double yPosFeet = tvec.get(1, 0)[0];
        double zPosFeet = tvec.get(2, 0)[0];

        // Old less effective aiming heading and distance calculation
        // double pixelsToFeet = TARGET_WIDTH / width;
        // distance = (TARGET_WIDTH * HighGoalProcessor.IMAGE_SIZE.width
        //         / (2 * width * Math.tan(VisionParameters.getFOVAngle() / 2)));
        // double xPosFeet = (center.x - (HighGoalProcessor.IMAGE_SIZE.width / 2)) * pixelsToFeet;
        // double yPosFeet = -(center.y - (HighGoalProcessor.IMAGE_SIZE.height / 2)) * pixelsToFeet;

        distance = Math.sqrt(xPosFeet * xPosFeet + zPosFeet * zPosFeet);
        position = new Point3(xPosFeet, yPosFeet, zPosFeet);

        xGoalAngle = Math.atan(xPosFeet / zPosFeet);
        yGoalAngle = Math.atan(yPosFeet / zPosFeet);

        validate();

        score = calculateScore();
    } else {
        valid = false;
    }
}
From source file:org.vinesrobotics.bot.utils.opencv.OpenCvManager.java
License:Open Source License
public Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    Mat pointMatRgba = new Mat();
    Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);

    return new Scalar(pointMatRgba.get(0, 0));
}