List of usage examples for the org.opencv.core.MatOfPoint2f constructor MatOfPoint2f()
public MatOfPoint2f()
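Before the project examples below, here is a minimal, self-contained sketch of the constructor in isolation. It is not taken from any of the listed projects; the class name MatOfPoint2fDemo and the point values are illustrative only. The no-argument constructor creates an empty 2-channel float matrix, which is then either filled via fromArray/fromList or passed to an OpenCV call as an output argument.

import org.opencv.core.Core;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;

public class MatOfPoint2fDemo {
    public static void main(String[] args) {
        // Assumption: the OpenCV native library is available on java.library.path.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // The no-argument constructor creates an empty matrix (0 rows, type CV_32FC2).
        MatOfPoint2f corners = new MatOfPoint2f();

        // It is typically filled afterwards from existing Point data ...
        corners.fromArray(new Point(0, 0), new Point(100, 0), new Point(100, 50), new Point(0, 50));
        System.out.println("rows = " + corners.rows() + ", channels = " + corners.channels());

        // ... or handed to an OpenCV call as an output argument, e.g.
        // Imgproc.approxPolyDP or Video.calcOpticalFlowPyrLK, as the examples below show.
    }
}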
From source file:syncleus.dann.data.video.LKTracker.java
License:Apache License
/**
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 */
public Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    // Forward-backward tracking: track last -> current, then current -> last.
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat,
            statusMat, errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat,
            statusFBMat, errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    float[] errSimilarity = new float[lastPoints.length];
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // Compute the real forward-backward error (relative to the LAST points, not the current ones).
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = TLDUtil.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // Compute the real similarity error.
    errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    // TODO errSimilarityFB has a problem: it differs from the C++ implementation.
    // Filter out points with a forward-backward error above the median AND points with a similarity error above the median.
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}
From source file:video.PictureAnalyser.java
public List<MatOfPoint> getConturs(Scalar low, Scalar high, Mat img) {
    Mat imgThresholded = new Mat();
    Mat imgThresholded2 = new Mat();
    Core.inRange(img, low, high, imgThresholded);
    // Handle hue wrap-around below 0: threshold the upper end of the hue range as well and OR the masks.
    if (low.val[0] < 0) {
        low.val[0] = 180 + low.val[0];
        high.val[0] = 179;
        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);
    }
    // Handle hue wrap-around above 179 in the same way.
    if (high.val[0] > 179) {
        low.val[0] = 0;
        high.val[0] = high.val[0] - 180;
        Core.inRange(img, low, high, imgThresholded2);
        Core.bitwise_or(imgThresholded, imgThresholded2, imgThresholded);
    }
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    int dilation_size = 3;
    Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
            new Size(2 * dilation_size + 1, 2 * dilation_size + 1));
    Imgproc.dilate(imgThresholded, imgThresholded, element1);
    Imgproc.findContours(imgThresholded, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f contour2f = new MatOfPoint2f(contours.get(i).toArray());
        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());
        Rect rect = Imgproc.boundingRect(points);
        int area = rect.width * rect.height;
        // Discard the contour if its bounding-box area is too small.
        if (area <= 500) {
            contours.remove(i);
            i--;
        }
    }
    return contours;
}
From source file:View.Signature.java
public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) {
    String bookObject = routeVal + n_img2 + extension;
    String bookScene = route + n_img1 + extension;
    //System.out.println("Starting SIFT");
    //java.lang.System.out.print("Opening images | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Finding keypoints with SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();
    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computing descriptors | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for the output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    //java.lang.System.out.print("Drawing keypoints on the object image | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match the object image against the scene image.
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detecting keypoints in the scene image | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computing descriptors for the scene image | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);
    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.print("Finding matches between the images | ");
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Computing good matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
    float nndrRatio = 0.7f;
    java.lang.System.out.println(matches.size());
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];
        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    if (goodMatchesList.size() >= 7) {
        //java.lang.System.out.println("Match found!!! Matches: " + goodMatchesList.size());
        //if (goodMatchesList.size() > max) {
        //    cambio = 1;
        //}
        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();
        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();
        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }
        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);
        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });
        //System.out.println("Transforming object corners to scene corners...");
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Drawing the matches image");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);
        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension;
        String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension;
        String n_img = route + "results\\" + n_img2 + "_sift" + extension;
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        //Highgui.imwrite(n_img, img);

        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height * 100 / matches.size();
        java.lang.System.out.println((int) result);
        //double result = goodMatches.size().height;
        if (result > 100) {
            return 100;
        } else if (result <= 100 && result > 85) {
            return 85;
        } else if (result <= 85 && result > 50) {
            return 50;
        } else if (result <= 50 && result > 25) {
            return 25;
        } else {
            return 0;
        }
    } else {
        //java.lang.System.out.println("Signature not found");
    }
    return 0;
    //System.out.println("Finishing SIFT");
}
From source file:View.SignatureLib.java
public static int sift(String routeRNV, String routeAdherent) {
    String bookObject = routeAdherent;
    String bookScene = routeRNV;
    //System.out.println("Starting SIFT");
    //java.lang.System.out.print("Opening images | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Finding keypoints with SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();
    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computing descriptors | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for the output image.
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);
    //java.lang.System.out.print("Drawing keypoints on the object image | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match the object image against the scene image.
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detecting keypoints in the scene image | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computing descriptors for the scene image | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);
    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.println(sceneDescriptors);
    if (sceneDescriptors.empty()) {
        java.lang.System.out.println("Object not found");
        return 0;
    }
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Computing good matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
    float nndrRatio = 0.7f;
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];
        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);
        }
    }

    if (goodMatchesList.size() >= 7) {
        max = goodMatchesList.size();
        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();
        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();
        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }
        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);
        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });
        //System.out.println("Transforming object corners to scene corners...");
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);
        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Drawing the matches image");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);
        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = "../pre/outputImage_sift.jpg";
        String n_matchoutput = "../pre/matchoutput_sift.jpg";
        String n_img = "../pre/sift.jpg";
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        Highgui.imwrite(n_img, img);

        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height; // *100/matches.size();
        int score = 0;
        if (result > 26) {
            score = 100;
        } else if (result <= 26 && result > 22) {
            score = 85;
        } else if (result <= 22 && result > 17) {
            score = 50;
        } else if (result <= 17 && result > 11) {
            score = 25;
        } else {
            score = 0;
        }
        java.lang.System.out.println("Score: " + score);
        return score;
    } else {
        java.lang.System.out.println("Object not found");
        return 0;
    }
    //System.out.println("Finishing SIFT");
}
From source file:vinylsleevedetection.Analyze.java
public void Check() {
    count = 1;
    // Load the OpenCV native library.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    // Compare each source image to the user image.
    for (int j = 1; j < 4; j++) {
        // Source image location (record sleeve).
        String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j + ".jpg";
        // User image location.
        String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg";
        // Load the images.
        Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        // Use BRISK feature detection.
        MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
        FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK);
        // Perform feature detection on the source image.
        featureDetector.detect(objectImage, objectKeyPoints);
        KeyPoint[] keypoints = objectKeyPoints.toArray();
        // Use the descriptor extractor.
        MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
        descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);
        Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar newKeypointColor = new Scalar(255, 0, 0);
        Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);
        MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
        MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
        featureDetector.detect(sceneImage, sceneKeyPoints);
        descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);
        Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar matchestColor = new Scalar(0, 255, 0);
        List<MatOfDMatch> matches = new LinkedList<>();
        DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);
        LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();
        float nndrRatio = 0.7f;
        for (int i = 0; i < matches.size(); i++) {
            MatOfDMatch matofDMatch = matches.get(i);
            DMatch[] dmatcharray = matofDMatch.toArray();
            DMatch m1 = dmatcharray[0];
            DMatch m2 = dmatcharray[1];
            if (m1.distance <= m2.distance * nndrRatio) {
                goodMatchesList.addLast(m1);
            }
        }
        // If the number of good matches is more than 150, a match is found.
        if (goodMatchesList.size() > 150) {
            System.out.println("Object Found");
            List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
            List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();
            LinkedList<Point> objectPoints = new LinkedList<>();
            LinkedList<Point> scenePoints = new LinkedList<>();
            for (int i = 0; i < goodMatchesList.size(); i++) {
                objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
                scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
            }
            MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
            objMatOfPoint2f.fromList(objectPoints);
            MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
            scnMatOfPoint2f.fromList(scenePoints);
            Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);
            Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
            Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);
            obj_corners.put(0, 0, new double[] { 0, 0 });
            obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
            obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
            obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });
            Core.perspectiveTransform(obj_corners, scene_corners, homography);
            Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
            // Draw a green square around the matched object.
            Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)), new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)), new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)), new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)), new Scalar(0, 255, 0), 10);
            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(goodMatchesList);
            Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                    matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
            // Write the image with the match, the image of the match locations, and the keypoints image.
            String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\";
            Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage);
            Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput);
            Imgcodecs.imwrite(folder + "found.jpg", img);
            count = j;
            break;
        } else {
            System.out.println("Object Not Found");
            count = 0;
        }
    }
}