List of usage examples for org.opencv.core MatOfPoint2f fromList
public void fromList(List<Point> lp)
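fromList copies a java.util.List<Point> into an n-by-1, CV_32FC2 matrix (n being the list size), which is the layout expected by functions such as Calib3d.findHomography, Core.perspectiveTransform, and the Imgproc contour routines used in the examples below. A minimal sketch of the call in isolation; the class name and the four corner points are made up for illustration and are not taken from any of the files below:

// Minimal sketch of MatOfPoint2f.fromList with hypothetical input points.
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class FromListSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        List<Point> corners = new ArrayList<>();
        corners.add(new Point(0, 0));
        corners.add(new Point(100, 0));
        corners.add(new Point(100, 50));
        corners.add(new Point(0, 50));

        MatOfPoint2f mat = new MatOfPoint2f();
        mat.fromList(corners); // 4 rows, 1 column, type CV_32FC2

        System.out.println(mat.rows() + " points, type " + mat.type());
    }
}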
From source file:org.lasarobotics.vision.detection.ObjectDetection.java
License:Open Source License
/**
 * Draw the object's location
 *
 * @param output         Image to draw on
 * @param objectAnalysis Object analysis information
 * @param sceneAnalysis  Scene analysis information
 */
public static void drawObjectLocation(Mat output, ObjectAnalysis objectAnalysis, SceneAnalysis sceneAnalysis) {
    List<Point> ptsObject = new ArrayList<>();
    List<Point> ptsScene = new ArrayList<>();

    KeyPoint[] keypointsObject = objectAnalysis.keypoints.toArray();
    KeyPoint[] keypointsScene = sceneAnalysis.keypoints.toArray();
    DMatch[] matches = sceneAnalysis.matches.toArray();

    for (DMatch matche : matches) {
        //Get the keypoints from these matches
        ptsObject.add(keypointsObject[matche.queryIdx].pt);
        ptsScene.add(keypointsScene[matche.trainIdx].pt);
    }

    MatOfPoint2f matObject = new MatOfPoint2f();
    matObject.fromList(ptsObject);

    MatOfPoint2f matScene = new MatOfPoint2f();
    matScene.fromList(ptsScene);

    //Calculate homography of object in scene
    Mat homography = Calib3d.findHomography(matObject, matScene, Calib3d.RANSAC, 5.0f);

    //Create the unscaled array of corners, representing the object size
    Point[] cornersObject = new Point[4];
    cornersObject[0] = new Point(0, 0);
    cornersObject[1] = new Point(objectAnalysis.object.cols(), 0);
    cornersObject[2] = new Point(objectAnalysis.object.cols(), objectAnalysis.object.rows());
    cornersObject[3] = new Point(0, objectAnalysis.object.rows());

    Point[] cornersSceneTemp = new Point[0];
    MatOfPoint2f cornersSceneMatrix = new MatOfPoint2f(cornersSceneTemp);
    MatOfPoint2f cornersObjectMatrix = new MatOfPoint2f(cornersObject);

    //Transform the object coordinates to the scene coordinates by the homography matrix
    Core.perspectiveTransform(cornersObjectMatrix, cornersSceneMatrix, homography);
    //Mat transform = Imgproc.getAffineTransform(cornersObjectMatrix, cornersSceneMatrix);

    //Draw the lines of the object on the scene
    Point[] cornersScene = cornersSceneMatrix.toArray();
    final ColorRGBA lineColor = new ColorRGBA("#00ff00");
    Drawing.drawLine(output, new Point(cornersScene[0].x + objectAnalysis.object.cols(), cornersScene[0].y),
            new Point(cornersScene[1].x + objectAnalysis.object.cols(), cornersScene[1].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[1].x + objectAnalysis.object.cols(), cornersScene[1].y),
            new Point(cornersScene[2].x + objectAnalysis.object.cols(), cornersScene[2].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[2].x + objectAnalysis.object.cols(), cornersScene[2].y),
            new Point(cornersScene[3].x + objectAnalysis.object.cols(), cornersScene[3].y), lineColor, 5);
    Drawing.drawLine(output, new Point(cornersScene[3].x + objectAnalysis.object.cols(), cornersScene[3].y),
            new Point(cornersScene[0].x + objectAnalysis.object.cols(), cornersScene[0].y), lineColor, 5);
}
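This example (and most of the ones that follow) uses fromList in the same three-step pattern: gather the matched keypoint coordinates into two parallel List<Point> collections, convert each list to a MatOfPoint2f with fromList, and pass the pair to Calib3d.findHomography. A stripped-down sketch of just that pattern; the helper name is hypothetical, the imports assume an OpenCV 3.x layout (KeyPoint and DMatch in org.opencv.core), and the RANSAC threshold of 5.0 simply mirrors the example above:

// Hypothetical helper: build a homography from matched keypoints.
import java.util.ArrayList;
import java.util.List;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class HomographyFromMatchesSketch {
    static Mat homographyFromMatches(KeyPoint[] objectKps, KeyPoint[] sceneKps, DMatch[] matches) {
        List<Point> objectPts = new ArrayList<>();
        List<Point> scenePts = new ArrayList<>();
        for (DMatch m : matches) {
            objectPts.add(objectKps[m.queryIdx].pt); // point in the object image
            scenePts.add(sceneKps[m.trainIdx].pt);   // corresponding point in the scene
        }

        MatOfPoint2f objMat = new MatOfPoint2f();
        objMat.fromList(objectPts);                  // List<Point> -> CV_32FC2 matrix
        MatOfPoint2f scnMat = new MatOfPoint2f();
        scnMat.fromList(scenePts);

        // RANSAC needs at least 4 correspondences
        return Calib3d.findHomography(objMat, scnMat, Calib3d.RANSAC, 5.0);
    }
}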
From source file:overwatchteampicker.OverwatchTeamPicker.java
public static ReturnValues findImage(String template, String source, int flag) {
    File lib = null;
    BufferedImage image = null;
    try {
        image = ImageIO.read(new File(source));
    } catch (Exception e) {
        e.printStackTrace();
    }
    String os = System.getProperty("os.name");
    String bitness = System.getProperty("sun.arch.data.model");
    if (os.toUpperCase().contains("WINDOWS")) {
        if (bitness.endsWith("64")) {
            lib = new File("C:\\Users\\POWERUSER\\Downloads\\opencv\\build\\java\\x64\\"
                    + System.mapLibraryName("opencv_java2413"));
        } else {
            lib = new File("libs//x86//" + System.mapLibraryName("opencv_java2413"));
        }
    }
    System.load(lib.getAbsolutePath());

    String tempObject = "images\\hero_templates\\" + template + ".png";
    String source_pic = source;
    Mat objectImage = Highgui.imread(tempObject, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat sceneImage = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SURF);
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    featureDetector.detect(sceneImage, sceneKeyPoints);
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 25);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
    float nndrRatio = .78f;
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];
        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    if (goodMatchesList.size() >= 4) {
        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();
        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(source_pic, Highgui.CV_LOAD_IMAGE_COLOR);
        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 255), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(255, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        if (new Point(scene_corners.get(0, 0)).x < new Point(scene_corners.get(1, 0)).x
                && new Point(scene_corners.get(0, 0)).y < new Point(scene_corners.get(2, 0)).y) {
            System.out.println("found " + template);
            Highgui.imwrite("points.jpg", outputImage);
            Highgui.imwrite("matches.jpg", matchoutput);
            Highgui.imwrite("final.jpg", img);

            if (flag == 0) {
                ReturnValues retVal = null;
                int y = (int) new Point(scene_corners.get(3, 0)).y;
                int yHeight = (int) new Point(scene_corners.get(3, 0)).y
                        - (int) new Point(scene_corners.get(2, 0)).y;
                if (y < image.getHeight() * .6) {
                    //if found hero is in upper half of image then return point 3,0
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .01), yHeight);
                } else {
                    //if found hero is in lower half of image then return point 2,0
                    y = (int) new Point(scene_corners.get(2, 0)).y;
                    retVal = new ReturnValues(y + (int) (image.getHeight() * .3), yHeight);
                }
                return retVal;
            } else if (flag == 1) {
                int[] xPoints = new int[4];
                int[] yPoints = new int[4];
                xPoints[0] = (int) (new Point(scene_corners.get(0, 0)).x);
                xPoints[1] = (int) (new Point(scene_corners.get(1, 0)).x);
                xPoints[2] = (int) (new Point(scene_corners.get(2, 0)).x);
                xPoints[3] = (int) (new Point(scene_corners.get(3, 0)).x);
                yPoints[0] = (int) (new Point(scene_corners.get(0, 0)).y);
                yPoints[1] = (int) (new Point(scene_corners.get(1, 0)).y);
                yPoints[2] = (int) (new Point(scene_corners.get(2, 0)).y);
                yPoints[3] = (int) (new Point(scene_corners.get(3, 0)).y);
                ReturnValues retVal = new ReturnValues(xPoints, yPoints);
                return retVal;
            }
        }
    }
    return null;
}
From source file:simeav.filtros.instanciaciones.DetectorModulosEstandar.java
@Override
public Mat detectarModulos(Mat original, Diagrama diagrama) {
    Imgproc.blur(original, original, new Size(15, 15));
    original = Utils.dilate(original);

    Mat jerarquia = new Mat();
    ArrayList<MatOfPoint> contornos = new ArrayList<>();
    Imgproc.findContours(original.clone(), contornos, jerarquia, Imgproc.RETR_CCOMP,
            Imgproc.CHAIN_APPROX_SIMPLE);

    ArrayList<MatOfPoint> cp = new ArrayList<>(contornos.size());
    Map<Integer, Rect> rectangulos = new HashMap<>();
    Integer id_cuadrado = 0;
    Mat resultado = Mat.zeros(original.size(), CvType.CV_8U);

    for (int i = contornos.size() - 1; i >= 0; i--) {
        if (jerarquia.get(0, i)[3] > -1) {
            MatOfPoint2f contorno2f = new MatOfPoint2f();
            contorno2f.fromList(contornos.get(i).toList());
            MatOfPoint2f c = new MatOfPoint2f();
            Imgproc.approxPolyDP(contorno2f, c, 3, true);
            cp.add(new MatOfPoint(c.toArray()));
            int lados = cp.get(cp.size() - 1).height();
            if ((4 <= lados) && lados < 12) {
                rectangulos.put(id_cuadrado, Imgproc.boundingRect(new MatOfPoint(c.toArray())));
                Point tl = new Point(rectangulos.get(id_cuadrado).tl().x - 20,
                        rectangulos.get(id_cuadrado).tl().y - 20);
                Point br = new Point(rectangulos.get(id_cuadrado).br().x + 20,
                        rectangulos.get(id_cuadrado).br().y + 20);
                Core.rectangle(resultado, tl, br, new Scalar(255, 255, 255), -1);
                diagrama.addModulo(id_cuadrado, new Rect(tl, br));
                Imgproc.drawContours(resultado, contornos, i, new Scalar(0, 0, 0), -1);
                id_cuadrado++;
            }
        }
    }
    return resultado;
}
From source file:simeav.filtros.instanciaciones.SeparadorTextoEstandar.java
@Override
public void separarTexto(Mat origin, int umbral_radio) {
    Mat original = origin.clone();
    imagenSinTexto = original.clone();
    imagenTexto = Mat.zeros(original.size(), CvType.CV_8U);
    imagenTexto = imagenTexto.setTo(new Scalar(255, 255, 255));

    Mat imGrises = new Mat();
    Mat bw = new Mat();
    Imgproc.cvtColor(original, imGrises, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(imGrises, bw, new Size(1, 1), 0);
    Imgproc.threshold(bw, bw, 200, 250, Imgproc.THRESH_BINARY_INV);

    ArrayList<MatOfPoint> contornos = Utils.detectarContornos(bw);
    for (int i = 0; i < contornos.size(); i++) {
        MatOfPoint2f contorno2f = new MatOfPoint2f();
        contorno2f.fromList(contornos.get(i).toList());
        float[] radius = new float[1];
        Point centro = new Point();
        Imgproc.minEnclosingCircle(contorno2f, centro, radius);
        double a = Imgproc.contourArea(contornos.get(i));
        if (radius[0] <= umbral_radio) {
            Imgproc.drawContours(imagenSinTexto, contornos, i, new Scalar(255, 255, 255), -1);
            Imgproc.drawContours(imagenTexto, contornos, i, new Scalar(0, 0, 0), -1);
        }
    }
}
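The two contour-based examples above bridge the integer MatOfPoint returned by findContours to the floating-point MatOfPoint2f that approxPolyDP and minEnclosingCircle require by going through toList() and fromList(). A condensed sketch of just that conversion; the class name, method name, and epsilon parameter are hypothetical:

// Hypothetical sketch of the MatOfPoint -> MatOfPoint2f bridge used above.
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.imgproc.Imgproc;

public class ContourConversionSketch {
    // Convert an integer contour (as returned by Imgproc.findContours) to the
    // floating-point type that approxPolyDP expects, then back again.
    static MatOfPoint approximate(MatOfPoint contour, double epsilon) {
        MatOfPoint2f contour2f = new MatOfPoint2f();
        contour2f.fromList(contour.toList());      // int points -> CV_32FC2

        MatOfPoint2f approx2f = new MatOfPoint2f();
        Imgproc.approxPolyDP(contour2f, approx2f, epsilon, true);

        return new MatOfPoint(approx2f.toArray()); // CV_32FC2 -> int points
    }
}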
From source file:View.Signature.java
public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) {
    String bookObject = routeVal + n_img2 + extension;
    String bookScene = route + n_img1 + extension;

    //System.out.println("Iniciando SIFT");
    //java.lang.System.out.print("Abriendo imagenes | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Encontrar keypoints con SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computando descriptores | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    //java.lang.System.out.print("Dibujando keypoints en imagen base | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detectando keypoints en imagen base | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computando descriptores en imagen base | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.print("Encontrando matches entre imagenes | ");
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Calculando buenos matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
    float nndrRatio = 0.7f;
    java.lang.System.out.println(matches.size());
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];
        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    if (goodMatchesList.size() >= 7) {
        //java.lang.System.out.println("Match encontrado!!! Matches: " + goodMatchesList.size());
        //if (goodMatchesList.size() > max) {
        //    cambio = 1;
        //}
        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();
        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Dibujando imagen de coincidencias");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension;
        String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension;
        String n_img = route + "results\\" + n_img2 + "_sift" + extension;
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        //Highgui.imwrite(n_img, img);

        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height * 100 / matches.size();
        java.lang.System.out.println((int) result);
        //double result = goodMatches.size().height;
        if (result > 100) {
            return 100;
        } else if (result <= 100 && result > 85) {
            return 85;
        } else if (result <= 85 && result > 50) {
            return 50;
        } else if (result <= 50 && result > 25) {
            return 25;
        } else {
            return 0;
        }
    } else {
        //java.lang.System.out.println("Firma no encontrada");
    }
    return 0;
    //System.out.println("Terminando SIFT");
}
From source file:View.SignatureLib.java
public static int sift(String routeRNV, String routeAdherent) {
    String bookObject = routeAdherent;
    String bookScene = routeRNV;

    //System.out.println("Iniciando SIFT");
    //java.lang.System.out.print("Abriendo imagenes | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Encontrar keypoints con SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computando descriptores | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    //java.lang.System.out.print("Dibujando keypoints en imagen base | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detectando keypoints en imagen base | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computando descriptores en imagen base | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.println(sceneDescriptors);
    if (sceneDescriptors.empty()) {
        java.lang.System.out.println("Objeto no encontrado");
        return 0;
    }
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Calculando buenos matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
    float nndrRatio = 0.7f;
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];
        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    if (goodMatchesList.size() >= 7) {
        max = goodMatchesList.size();
        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();
        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Dibujando imagen de coincidencias");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = "../pre/outputImage_sift.jpg";
        String n_matchoutput = "../pre/matchoutput_sift.jpg";
        String n_img = "../pre/sift.jpg";
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        Highgui.imwrite(n_img, img);

        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height; //*100/matches.size();
        int score = 0;
        if (result > 26) {
            score = 100;
        } else if (result <= 26 && result > 22) {
            score = 85;
        } else if (result <= 22 && result > 17) {
            score = 50;
        } else if (result <= 17 && result > 11) {
            score = 25;
        } else {
            score = 0;
        }
        java.lang.System.out.println("Score: " + score);
        return score;
    } else {
        java.lang.System.out.println("Objeto no encontrado");
        return 0;
    }
    //System.out.println("Terminando SIFT");
}
From source file:vinylsleevedetection.Analyze.java
public void Check() {
    count = 1;
    //load openCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    //for loop to compare source images to user image
    for (int j = 1; j < 4; j++) {
        //source image location (record sleeve)
        String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j + ".jpg";
        //user image location
        String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg";

        //load images
        Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);

        //use BRISK feature detection
        MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
        FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK);
        //perform feature detection on source image
        featureDetector.detect(objectImage, objectKeyPoints);
        KeyPoint[] keypoints = objectKeyPoints.toArray();

        //use descriptor extractor
        MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
        descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

        Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar newKeypointColor = new Scalar(255, 0, 0);
        Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

        MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
        MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
        featureDetector.detect(sceneImage, sceneKeyPoints);
        descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

        Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar matchestColor = new Scalar(0, 255, 0);

        List<MatOfDMatch> matches = new LinkedList<>();
        DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

        LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
        float nndrRatio = 0.7f;
        for (int i = 0; i < matches.size(); i++) {
            MatOfDMatch matofDMatch = matches.get(i);
            DMatch[] dmatcharray = matofDMatch.toArray();
            DMatch m1 = dmatcharray[0];
            DMatch m2 = dmatcharray[1];
            if (m1.distance <= m2.distance * nndrRatio) {
                goodMatchesList.addLast(m1);
            }
        }

        //if the number of good matches is more than 150 a match is found
        if (goodMatchesList.size() > 150) {
            System.out.println("Object Found");
            List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
            List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

            LinkedList<Point> objectPoints = new LinkedList<>();
            LinkedList<Point> scenePoints = new LinkedList<>();
            for (int i = 0; i < goodMatchesList.size(); i++) {
                objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
                scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
            }

            MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
            objMatOfPoint2f.fromList(objectPoints);
            MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
            scnMatOfPoint2f.fromList(scenePoints);

            Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

            Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
            Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
            obj_corners.put(0, 0, new double[] { 0, 0 });
            obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
            obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
            obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

            Core.perspectiveTransform(obj_corners, scene_corners, homography);

            Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
            //draw a green square around the matched object
            Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                    new Scalar(0, 255, 0), 10);

            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(goodMatchesList);

            Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                    matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

            //output image with match, image of the match locations and keypoints image
            String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\";
            Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage);
            Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput);
            Imgcodecs.imwrite(folder + "found.jpg", img);
            count = j;
            break;
        } else {
            System.out.println("Object Not Found");
            count = 0;
        }
    }
}