List of usage examples for org.opencv.core Mat rows
public int rows()
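rows() returns the number of rows in the matrix, i.e. its height in pixels for an image; it pairs with cols(), and height()/width() are aliases. A minimal sketch before the examples:

    // Minimal sketch: rows() is the matrix height, cols() the width
    Mat m = new Mat(480, 640, CvType.CV_8UC3);    // Mat(rows, cols, type)
    System.out.println(m.rows());                 // 480
    System.out.println(m.cols());                 // 640
    System.out.println(m.height() == m.rows());   // true: height() is an alias of rows()
    System.out.println(m.width() == m.cols());    // true: width() is an alias of cols()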
From source file:com.mycompany.linedetection.LineDetector.java
public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();
    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = (width < height) ? width : height;
    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);
    int firstN = (lines.rows() < 5) ? lines.rows() : 5;
    for (int x = 0; x < lines.rows(); x++) {
        // HoughLinesP returns an N x 1 Mat: each row holds one 4-element segment {x1, y1, x2, y2},
        // so column 0 is the only valid column (the original also read lines.get(x, 1), which
        // returns null for this shape and was never used; it is dropped here)
        double[] vec = lines.get(x, 0);
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);
        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }
    }
    // Sort by descending length; Double.compare avoids the truncation bug of the original
    // (int) (l2.getLength() - l1.getLength()), which collapses sub-1.0 differences to 0
    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            return Double.compare(l2.getLength(), l1.getLength());
        }
    });
    // keep (and draw) the firstN + 1 longest non-diagonal lines
    ArrayList<Line> arr = new ArrayList<>();
    if (lineList.size() >= firstN + 1) {
        for (int i = 0; i < firstN + 1; i++) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(),
                    x2 = lineList.get(i).getX2(), y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}
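The detail the example above depends on: HoughLinesP packs its output as an N x 1 Mat of 4-element vectors, so lines.rows() is the number of detected segments. A stripped-down iteration sketch, assuming an edge map edges and an output image img (threshold values here are illustrative):

    // Sketch: lines.rows() == number of detected segments, each row one {x1, y1, x2, y2} vector
    Mat lines = new Mat();
    Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 50, 30, 10);
    for (int i = 0; i < lines.rows(); i++) {
        double[] seg = lines.get(i, 0);
        Imgproc.line(img, new Point(seg[0], seg[1]), new Point(seg[2], seg[3]), new Scalar(0, 255, 0), 2);
    }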
From source file:com.oetermann.imageclassifier.DescriptorExtractorWrapper.java
License:Open Source License
public List<Mat> readImages(List<String> files, boolean grayscale) {
    List<Mat> images = new ArrayList<>();
    Mat mat;
    for (ListIterator<String> it = files.listIterator(); it.hasNext();) {
        String file = it.next();
        mat = Imgcodecs.imread(file);
        if (mat.dims() > 0 && mat.cols() > 0 && mat.rows() > 0) {
            if (grayscale) {
                // imread loads images in BGR channel order, so BGR2GRAY is the matching conversion
                // (the original used COLOR_RGB2GRAY, which swaps the red/blue channel weights)
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_BGR2GRAY);
            }
            images.add(mat);
        } else {
            it.remove();
            System.out.println("Cannot read file: " + file);
        }
    }
    return images;
}
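The triple check mat.dims() > 0 && mat.cols() > 0 && mat.rows() > 0 can be collapsed: Mat.empty() is the usual guard for a failed imread. An equivalent one-liner under the same setup:

    // Equivalent guard: imread returns an empty Mat on failure
    if (!mat.empty()) {
        images.add(mat);
    }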
From source file:com.oetermann.imageclassifier.Util.java
License:Open Source License
public static void saveMat(String path, Mat mat) {
    File file = new File(path).getAbsoluteFile();
    file.getParentFile().mkdirs();
    try {
        int rows = mat.rows();
        int cols = mat.cols();
        int type = mat.type();
        Object data;
        // copy the pixel buffer into a primitive array matching the Mat depth
        switch (type) {
            case CvType.CV_8S:
            case CvType.CV_8U:
                data = new byte[(int) mat.total() * mat.channels()];
                mat.get(0, 0, (byte[]) data);
                break;
            case CvType.CV_16S:
            case CvType.CV_16U:
                data = new short[(int) mat.total() * mat.channels()];
                mat.get(0, 0, (short[]) data);
                break;
            case CvType.CV_32S:
                data = new int[(int) mat.total() * mat.channels()];
                mat.get(0, 0, (int[]) data);
                break;
            case CvType.CV_32F:
                data = new float[(int) mat.total() * mat.channels()];
                mat.get(0, 0, (float[]) data);
                break;
            case CvType.CV_64F:
                data = new double[(int) mat.total() * mat.channels()];
                mat.get(0, 0, (double[]) data);
                break;
            default:
                data = null;
        }
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
            // write order: rows, cols, type, then the raw pixel array
            oos.writeObject(rows);
            oos.writeObject(cols);
            oos.writeObject(type);
            oos.writeObject(data);
            // no explicit close() needed: try-with-resources closes the stream
        }
    } catch (IOException | ClassCastException ex) {
        System.err.println("ERROR: Could not save mat to file: " + path);
    }
}
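A reader for this format is not shown on this page; a matching sketch (a hypothetical loadMat, not from the original source, mirroring the write order rows, cols, type, data) might look like:

    // Sketch: rebuild the Mat from the serialized rows/cols/type/data quadruple
    public static Mat loadMat(String path) {
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path))) {
            int rows = (int) ois.readObject();
            int cols = (int) ois.readObject();
            int type = (int) ois.readObject();
            Object data = ois.readObject();
            Mat mat = new Mat(rows, cols, type);
            switch (type) {
                case CvType.CV_8S:
                case CvType.CV_8U:  mat.put(0, 0, (byte[]) data);   break;
                case CvType.CV_16S:
                case CvType.CV_16U: mat.put(0, 0, (short[]) data);  break;
                case CvType.CV_32S: mat.put(0, 0, (int[]) data);    break;
                case CvType.CV_32F: mat.put(0, 0, (float[]) data);  break;
                case CvType.CV_64F: mat.put(0, 0, (double[]) data); break;
            }
            return mat;
        } catch (IOException | ClassNotFoundException | ClassCastException ex) {
            System.err.println("ERROR: Could not read mat from file: " + path);
            return null;
        }
    }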
From source file:com.opencv.mouse.MouseMainFrame.java
private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {
            MatToBufImg matToBufferedImageConverter = new MatToBufImg(); // utility class to convert Mat to Java's BufferedImage
            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Camera is not open!");
            } else {
                System.out.println("Camera opened --> " + webCam.toString());
            }
            // Mat takes (rows, cols, type); the original passed cols before rows in the next two
            // constructors, which only went unnoticed because read() and inRange() reallocate
            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            Mat hsv_image = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {
                }
                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Webcam is closed!");
                    }
                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {
                        }
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        // Leftover colour-range experiments (blue / orange), kept commented out:
                        // run in place on webcam_image they replace the camera frame with a binary mask.
                        // Core.inRange(webcam_image, new Scalar(75, 63, 40), new Scalar(118, 255, 255), webcam_image);
                        // Core.inRange(webcam_image, new Scalar(5, 50, 50), new Scalar(15, 255, 255), webcam_image);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255), thresholded);
                        Imgproc.dilate(thresholded, thresholded, element);
                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);
                        Imgproc.erode(thresholded, thresholded, element);
                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);
                        for (int i = 0; i < contours.size(); i++) {
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                // map the 640x480 camera frame onto a 1920x1080 screen
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }
                        }
                        // re-enabled: the converter needs a frame before getBufferedImage() is called
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");
                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);
                    } else {
                        System.out.println("No image!");
                        break;
                    }
                }
                // webCam.release();
            }
        }
    };
    threadDurum = true;
    t.start();
}
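The cursor above jumps to each bounding box's top-left corner, which jitters as the box grows and shrinks. A steadier variant (a sketch, not in the original source, keeping the same hard-coded 640x480-to-1920x1080 mapping) drives the mouse from the box centre instead:

    // Sketch: track the centre of the bounding box rather than its corner
    if (rect.height > 15 && rect.width > 15) {
        double cx = rect.x + rect.width / 2.0;
        double cy = rect.y + rect.height / 2.0;
        robot.mouseMove((int) (cx * 3), (int) (cy * 2.25));
    }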
From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java
License:Open Source License
private static byte[] computeImageBytesCount(Mat image, boolean raw) {
    int rawBytesCount = image.channels() * image.rows() * image.cols();
    // raw (SimDoc) mode appends one line-termination byte per pixel row
    int simDocExtra = raw ? image.rows() : 0;
    return new byte[rawBytesCount + simDocExtra];
}
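For the 8-bit images this helper targets, the same count falls out of total() and elemSize(): total() is rows() * cols(), and elemSize() is channels() times the per-channel byte width (1 for CV_8U). A one-line equivalent sketch:

    // Equivalent for 8-bit Mats: total() == rows() * cols(), elemSize() == channels() * 1 byte
    int rawBytesCount = (int) (image.total() * image.elemSize());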
From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java
License:Open Source License
private static void fillByteArray(byte[] byteArray, Mat image, boolean raw) {
    int colsNb = image.cols();
    int bytesPerPixel = image.channels();
    int bytesPerRow = colsNb * bytesPerPixel + (raw ? 1 : 0);
    byte[] pixel = new byte[bytesPerPixel];
    int magicNumberOffset = 0;
    for (int y = 0; y < image.rows(); y++) {
        for (int x = 0; x < colsNb; x++) {
            image.get(y, x, pixel);
            for (int z = 0; z < bytesPerPixel; z++) {
                byteArray[magicNumberOffset + y * bytesPerRow + x * bytesPerPixel + z] = pixel[z];
            }
        }
        if (raw) {
            // raw (SimDoc) mode terminates each pixel row with a sentinel byte
            byteArray[magicNumberOffset + y * bytesPerRow + colsNb * bytesPerPixel] = SIMDOC_LINE_TERMINATION;
        }
    }
}
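Per-pixel get() crosses the JNI boundary rows() * cols() times. When no per-row terminator is needed, a single bulk read is much faster; a sketch assuming a continuous 8-bit Mat:

    // Bulk copy: one JNI call reads the whole continuous Mat into the buffer
    byte[] buffer = new byte[(int) (image.total() * image.channels())];
    image.get(0, 0, buffer);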
From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java
License:Apache License
/**
 * Matches a concrete point of the eye using template matching with TM_SQDIFF_NORMED.
 */
private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) {
    Point matchLoc;
    try {
        // skip when no template has been built yet
        if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) {
            return;
        }
        Mat submatGray = matrixGray.submat(area);
        // matchTemplate's result is (rows - templateRows + 1) x (cols - templateCols + 1);
        // the original passed new Mat(cols, rows, ...) with the arguments swapped, which was
        // harmless only because matchTemplate reallocates the output anyway
        int cols = submatGray.cols() - builtTemplate.cols() + 1;
        int rows = submatGray.rows() - builtTemplate.rows() + 1;
        Mat outputTemplateMat = new Mat(rows, cols, CvType.CV_8U);
        Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED);
        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat);
        // the best match depends on the matching method: for TM_SQDIFF_NORMED it is the minimum,
        // for correlation-based methods it would be the maximum
        matchLoc = minMaxLocResult.minLoc;
        Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
        Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x,
                matchLoc.y + builtTemplate.rows() + area.y);
        FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
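The two translated points handed to drawMatchedEye are opposite corners of the matched region, offset back into full-frame coordinates. If FaceDrawerOpenCV were not available, a plain outline would do the same job (a sketch under the same variable names):

    // Sketch: outline the matched region directly on the RGBA frame
    Imgproc.rectangle(matrixRGBA, matchLocTx, matchLocTy, new Scalar(0, 255, 0, 255), 2);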
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Compute the rectangle where the searched picture is, and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 *
 * @deprecated Kept here for information, but OpenCV 3 Java builds no longer include SURF.
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    // FLANN matching requires 32-bit float descriptors
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);
    // match() yields one DMatch per descriptor row, hence the objectDescriptor.rows() bound;
    // toList() is hoisted out of the loops instead of being re-created on every iteration
    List<DMatch> matchesList = matches.toList();
    double maxDist = 0;
    double minDist = 10000;
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matchesList.get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matchesList.get(i).distance < detectionThreshold) {
            goodMatches.addLast(matchesList.get(i));
        }
    }
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);
    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0 (plain least squares)
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // corners of the object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));
    // corners of the object projected into the scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left
    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners so they land next to the object image in the side-by-side match picture
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });
            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);
            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);
    // rework the scene points: the object rotation is now known to be 0, 90, 180 or 270 degrees
    reworkOnScenePoints(p1, p2, p3, p4);
    // check that the aspect ratio of the detected zone matches the object's
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);
    recordDetectedRectangle(p1, p2, p3, p4);
}
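The homography above is computed with method 0 (plain least squares), which is sensitive to outlier matches; the code's own comment points at RANSAC. The variant it hints at would be:

    // Robust alternative: RANSAC rejects outlier correspondences; 5 is the reprojection threshold in pixels
    Mat hg = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 5);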
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Detect the object inside the scene.
 * We also search the scale of the scene from 20% to 120% by steps:
 * steps are 10%, with 0.6 accuracy;
 * then when a good match is found, we search around it by 5% scale steps with 0.7 accuracy;
 * then when a good match is found, we search around it by 2.5% scale steps with 0.8 accuracy.
 *
 * Example:
 * first pass: scales are 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200;
 * good matches are found around 600 and 700;
 * second pass: scales are 550, 600, 650, 700, 750; good matches are found at 650;
 * third pass: scales are 625, 650, 675.
 * The best match is at 675.
 */
public void detectExactZoneWithScale() {
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

    List<TemplateMatchProperties> matches = Collections.synchronizedList(new ArrayList<>());

    // step size (in tenths of a percent) -> matching threshold for that pass
    Map<Integer, Double> scaleSteps = new LinkedHashMap<>();
    scaleSteps.put(100, 0.6);
    scaleSteps.put(50, 0.7);
    scaleSteps.put(25, 0.8);

    int currentStep = 100;
    Set<Integer> computedScales = new HashSet<>();

    while (currentStep >= 25) {
        final double currentThreshold = scaleSteps.get(currentStep);

        // collect the scales to test in this pass
        Set<Integer> localScales = Collections.synchronizedSet(new HashSet<>());
        if (currentStep == 100) {
            for (int scale = 200; scale < 1200; scale += currentStep) {
                localScales.add(scale);
            }
        } else {
            if (matches.isEmpty()) {
                throw new ImageSearchException("no matches");
            }
            for (TemplateMatchProperties tmpM : matches) {
                if (tmpM.isActive()) {
                    localScales.add(tmpM.getMatchScale() - currentStep);
                    localScales.add(tmpM.getMatchScale() + currentStep);
                }
            }
        }

        ExecutorService executorService = Executors
                .newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        for (int scale : localScales) {
            if (computedScales.contains(scale)) {
                continue;
            }
            computedScales.add(scale);

            // resize to the scale factor
            final int localScale = scale;
            Size sz = new Size(sceneImageMat.cols() * localScale / 1000.0,
                    sceneImageMat.rows() * localScale / 1000.0);

            // skip if the resized image is smaller than the object
            if (sz.width < objectImageMat.cols() || sz.height < objectImageMat.rows()) {
                continue;
            }
            executorService.submit(() -> {
                Mat resizeSceneImageMat = new Mat();
                Imgproc.resize(sceneImageMat, resizeSceneImageMat, sz);
                try {
                    TemplateMatchProperties match = detectExactZone2(resizeSceneImageMat, objectImageMat,
                            localScale, currentThreshold);
                    matches.add(match);
                } catch (ImageSearchException e) {
                }
            });
        }
        executorService.shutdown();
        try {
            executorService.awaitTermination(10, TimeUnit.SECONDS);
        } catch (Exception e) {
            logger.info("Could not compute scale within 10 seconds", e);
        }

        // shortcut if we find a very good match
        double cleanThreshold = currentThreshold;
        matches.sort((TemplateMatchProperties t1, TemplateMatchProperties t2) -> -(t1.getMatchValue()
                .compareTo(t2.getMatchValue())));
        if (!matches.isEmpty() && matches.get(0).getMatchValue() > 0.9) {
            cleanThreshold = 0.9;
            currentStep = Math.min(currentStep, 50);
        }
        currentStep = currentStep / 2;

        // deactivate matches whose matching value is too low
        for (TemplateMatchProperties t : matches) {
            if (t.getMatchValue() < cleanThreshold) {
                t.setActive(false);
            }
        }
    }

    // get the best match
    matches.sort((TemplateMatchProperties t1, TemplateMatchProperties t2) -> -(t1.getMatchValue()
            .compareTo(t2.getMatchValue())));
    if (!matches.isEmpty()) {
        TemplateMatchProperties bestMatch = matches.get(0);
        if (bestMatch.getMatchValue() < 1 - detectionThreshold) {
            throw new ImageSearchException(
                    String.format("No match found for threshold %.2f, match found with value %.2f",
                            1 - detectionThreshold, bestMatch.getMatchValue()));
        }
        // NB: Selenium's Rectangle constructor takes (x, y, height, width),
        // hence rows() (height) is passed before cols() (width) here
        detectedRectangle = new Rectangle((int) (bestMatch.getMatchLoc().x / bestMatch.getDoubleScale()),
                (int) (bestMatch.getMatchLoc().y / bestMatch.getDoubleScale()),
                (int) (objectImageMat.rows() / bestMatch.getDoubleScale()),
                (int) (objectImageMat.cols() / bestMatch.getDoubleScale()));
        if (debug) {
            try {
                Imgproc.rectangle(sceneImageMat, new Point(detectedRectangle.x, detectedRectangle.y),
                        new Point(detectedRectangle.x + detectedRectangle.width,
                                detectedRectangle.y + detectedRectangle.height),
                        new Scalar(0, 255, 0));
                showResultingPicture(sceneImageMat);
            } catch (IOException e) {
            }
        }
        rotationAngle = 0;
        sizeRatio = detectedRectangle.width / (double) objectImageMat.cols();
    } else {
        throw new ImageSearchException("no matching has been found");
    }
}
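All the scale bookkeeping above rests on one identity: a match found in a scene resized by factor s lies at matchLoc / s in the original scene, with size objectSize / s. A minimal sketch of a single scale step under that convention (variable names are illustrative, not from the original class):

    // One scale step: resize the scene, match, then map the hit back to original coordinates
    double s = 0.65; // scale factor under test
    Mat resized = new Mat();
    Imgproc.resize(sceneImageMat, resized, new Size(sceneImageMat.cols() * s, sceneImageMat.rows() * s));
    if (resized.cols() >= objectImageMat.cols() && resized.rows() >= objectImageMat.rows()) {
        Mat result = new Mat();
        Imgproc.matchTemplate(resized, objectImageMat, result, Imgproc.TM_CCOEFF_NORMED);
        Core.MinMaxLocResult mm = Core.minMaxLoc(result);
        int x = (int) (mm.maxLoc.x / s); // top-left corner in the unresized scene
        int y = (int) (mm.maxLoc.y / s);
        int w = (int) (objectImageMat.cols() / s);
        int h = (int) (objectImageMat.rows() / s);
    }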
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
private MinMaxLocResult getBestTemplateMatching(int matchMethod, Mat sceneImageMat, Mat objectImageMat) {
    // Create the result matrix: (sceneRows - objectRows + 1) x (sceneCols - objectCols + 1)
    int resultCols = sceneImageMat.cols() - objectImageMat.cols() + 1;
    int resultRows = sceneImageMat.rows() - objectImageMat.rows() + 1;
    Mat result = new Mat(resultRows, resultCols, CvType.CV_32FC1);

    // Do the matching
    Imgproc.matchTemplate(sceneImageMat, objectImageMat, result, matchMethod);

    // Localize the best match with minMaxLoc
    return Core.minMaxLoc(result);
}
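How to read the returned MinMaxLocResult depends on the method: for TM_SQDIFF and TM_SQDIFF_NORMED the best match is at minLoc (smallest value), while for the correlation-based methods it is at maxLoc. A usage sketch:

    // Sketch: pick the right extremum for the chosen matching method
    int matchMethod = Imgproc.TM_CCOEFF_NORMED;
    Core.MinMaxLocResult mm = getBestTemplateMatching(matchMethod, sceneImageMat, objectImageMat);
    boolean squaredDiff = matchMethod == Imgproc.TM_SQDIFF || matchMethod == Imgproc.TM_SQDIFF_NORMED;
    Point bestLoc = squaredDiff ? mm.minLoc : mm.maxLoc;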