List of usage examples for org.opencv.core Mat size
public Size size()
From source file:samples.FtcTestOpenCv.java
License:Open Source License
/** * This method rotate the image to the specified angle. * * @param src specifies the image to be rotated. * @param dst specifies the destination to put the rotated image. * @param angle specifies the rotation angle. *///from w ww. j av a 2 s .co m private void rotateImage(Mat src, Mat dst, double angle) { angle %= 360.0; if (angle == 0.0) { src.copyTo(dst); } else if (angle == 90.0 || angle == -270.0) { Core.transpose(src, dst); Core.flip(dst, dst, 1); } else if (angle == 180.0 || angle == -180.0) { Core.flip(src, dst, -1); } else if (angle == 270.0 || angle == -90.0) { Core.transpose(src, dst); Core.flip(dst, dst, 0); } else { Mat rotMat = Imgproc.getRotationMatrix2D(new Point(src.cols() / 2.0, src.rows() / 2.0), angle, 1.0); Imgproc.warpAffine(src, dst, rotMat, src.size()); } }
From source file:samples.LWF.java
private static void affine(Mat mat, double[][] from, double[][] to, double[][] coeficients, Mat lienzo, double escala, double gap) { // throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. // http://stackoverflow.com/questions/10100715/opencv-warping-from-one-triangle-to-another // https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/ // http://docs.opencv.org/2.4/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.html MatOfPoint2f src_pf = new MatOfPoint2f(new Point(from[0][0], from[0][1]), new Point(from[1][0], from[1][1]), new Point(from[2][0], from[2][1])); MatOfPoint2f dst_pf = new MatOfPoint2f(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]), new Point(to[2][0], to[2][1])); // https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/#download //how do I set up the position numbers in MatOfPoint2f here? // Mat perspective_matrix = Imgproc.getAffineTransform(src_pf, dst_pf); Rect r1 = Imgproc.boundingRect(new MatOfPoint(new Point(from[0][0], from[0][1]), new Point(from[1][0], from[1][1]), new Point(from[2][0], from[2][1]))); Rect r2 = Imgproc.boundingRect(new MatOfPoint(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]), new Point(to[2][0], to[2][1]))); MatOfPoint2f tri1Cropped = new MatOfPoint2f(new Point(from[0][0] - r1.x, from[0][1] - r1.y), new Point(from[1][0] - r1.x, from[1][1] - r1.y), new Point(from[2][0] - r1.x, from[2][1] - r1.y)); MatOfPoint tri2CroppedInt = new MatOfPoint(new Point(to[0][0] - r2.x, to[0][1] - r2.y), new Point(to[1][0] - r2.x, to[1][1] - r2.y), new Point(to[2][0] - r2.x, to[2][1] - r2.y)); MatOfPoint2f tri2Cropped = new MatOfPoint2f(new Point((to[0][0] - r2.x), (to[0][1] - r2.y)), new Point((to[1][0] - r2.x), (to[1][1] - r2.y)), new Point((to[2][0] - r2.x), (to[2][1] - r2.y))); // for (int i = 0; i < 3; i++) { // // tri1Cropped.push_back(new MatOfPoint(new Point(from[i][0] - r1.x, 
from[i][1] - r1.y))); // new Point( from[i][0] - r1.x, from[i][1]- r1.y) ); // //tri2Cropped.push_back(new MatOfPoint(new Point(to[i][0] - r2.x, to[i][1] - r2.y))); //// w w w.j a v a 2 s . c om // // fillConvexPoly needs a vector of Point and not Point2f // // tri2CroppedInt.push_back(new MatOfPoint2f(new Point((int) (to[i][0] - r2.x), (int) (to[i][1] - r2.y)))); // // } // Apply warpImage to small rectangular patches Mat img1Cropped = mat.submat(r1); //img1(r1).copyTo(img1Cropped); // Given a pair of triangles, find the affine transform. Mat warpMat = Imgproc.getAffineTransform(tri1Cropped, tri2Cropped); // Mat bbb = warpMat.mul(tri1Cropped); // // System.out.println( warpMat.dump() ); // System.out.println( tri2Cropped.dump() ); // System.out.println( bbb.dump() ); // Apply the Affine Transform just found to the src image Mat img2Cropped = Mat.zeros(r2.height, r2.width, img1Cropped.type()); Imgproc.warpAffine(img1Cropped, img2Cropped, warpMat, img2Cropped.size(), 0, Imgproc.INTER_LINEAR, new Scalar(Core.BORDER_TRANSPARENT)); //, 0, Imgproc.INTER_LINEAR, new Scalar(Core.BORDER_REFLECT_101)); // Get mask by filling triangle Mat mask = Mat.zeros(r2.height, r2.width, CvType.CV_8UC3); ///CV_8U CV_32FC3 Imgproc.fillConvexPoly(mask, tri2CroppedInt, new Scalar(1.0, 1.0, 1.0), 16, 0); // Copy triangular region of the rectangular patch to the output image // Core.multiply(img2Cropped,mask, img2Cropped); // // Core.multiply(mask, new Scalar(-1), mask); // Core.(mask,new Scalar(gap), mask); //Core.multiply(lienzo.submat(r2), (new Scalar(1.0,1.0,1.0)). 
- Core.multiply(mask,), lienzo.submat(r2)); // img2(r2) = img2(r2) + img2Cropped; // Core.subtract(Mat.ones(mask.height(), mask.width(), CvType.CV_8UC3), mask, mask); // Mat ff = ; // este Core.multiply(img2Cropped, mask, img2Cropped); //Core.multiply(lienzo.submat(r2), mask , lienzo.submat(r2)); Core.add(lienzo.submat(r2), img2Cropped, lienzo.submat(r2)); /* Mat bb = new Mat(mat, r2); bb.setTo(new Scalar(rnd.nextInt(),rnd.nextInt(),rnd.nextInt())); Core.multiply(bb,mask, bb); Core.multiply(lienzo.submat(r2), mask , lienzo.submat(r2)); Core.add(lienzo.submat(r2), bb, lienzo.submat(r2)); */ // lienzo.submat(r2).setTo(new Scalar(rnd.nextInt(),rnd.nextInt(),rnd.nextInt())); // // Imgproc.fillConvexPoly(lienzo, new MatOfPoint( // new Point(to[0][0] , to[0][1]), // new Point(to[1][0] , to[1][1]), // new Point(to[2][0] , to[2][1] )), new Scalar(1,1,1)); // img2Cropped.copyTo(lienzo); // return; // http://stackoverflow.com/questions/14111716/how-to-set-a-mask-image-for-grabcut-in-opencv // Imgproc.warpAffine(mat, lienzo, perspective_matrix, lienzo.size()); // Imgproc.getAffineTransform(null, null); /* // Find bounding rectangle for each triangle Rect r1 = boundingRect(tri1); Rect r2 = boundingRect(tri2); // Offset points by left top corner of the respective rectangles vector<Point2f> tri1Cropped, tri2Cropped; vector<Point> tri2CroppedInt; for(int i = 0; i < 3; i++) { tri1Cropped.push_back( Point2f( tri1[i].x - r1.x, tri1[i].y - r1.y) ); tri2Cropped.push_back( Point2f( tri2[i].x - r2.x, tri2[i].y - r2.y) ); // fillConvexPoly needs a vector of Point and not Point2f tri2CroppedInt.push_back( Point((int)(tri2[i].x - r2.x), (int)(tri2[i].y - r2.y)) ); } // Apply warpImage to small rectangular patches Mat img1Cropped; img1(r1).copyTo(img1Cropped); // Given a pair of triangles, find the affine transform. 
Mat warpMat = getAffineTransform( tri1Cropped, tri2Cropped ); // Apply the Affine Transform just found to the src image Mat img2Cropped = Mat::zeros(r2.height, r2.width, img1Cropped.type()); warpAffine( img1Cropped, img2Cropped, warpMat, img2Cropped.size(), INTER_LINEAR, BORDER_REFLECT_101); // Get mask by filling triangle Mat mask = Mat::zeros(r2.height, r2.width, CV_32FC3); fillConvexPoly(mask, tri2CroppedInt, Scalar(1.0, 1.0, 1.0), 16, 0); // Copy triangular region of the rectangular patch to the output image multiply(img2Cropped,mask, img2Cropped); multiply(img2(r2), Scalar(1.0,1.0,1.0) - mask, img2(r2)); img2(r2) = img2(r2) + img2Cropped;*/ }
From source file:servlets.FillAreaByScribble.java
/** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> * methods.// ww w . j ava 2s . c o m * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { String imageForTextRecognition = request.getParameter("imageForTextRecognition") + ".png"; String isSingleRegion = request.getParameter("isSingleRegion"); boolean makeSingleRegion = isSingleRegion.toLowerCase().equals("true"); Mat original = ImageUtils.loadImage(imageForTextRecognition, request); Mat image = original.clone(); Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1); String samplingPoints = request.getParameter("samplingPoints"); Gson gson = new Gson(); Point[] tmpPoints = gson.fromJson(samplingPoints, Point[].class); ArrayList<Point> userPoints = new ArrayList<Point>(Arrays.asList(tmpPoints)); Mat userPointsImage = image.clone(); ArrayList<Mat> maskRegions = new ArrayList<>(); Random random = new Random(); int b = random.nextInt(256); int g = random.nextInt(256); int r = random.nextInt(256); Scalar newVal = new Scalar(b, g, r); FloodFillFacade floodFillFacade = new FloodFillFacade(); int k = 0; for (int i = 0; i < userPoints.size(); i++) { Point point = userPoints.get(i); image = floodFillFacade.fill(image, mask, (int) point.x, (int) point.y, newVal); Mat seedImage = original.clone(); Core.circle(seedImage, point, 9, new Scalar(0, 0, 255), -1); Core.putText(userPointsImage, "" + k, new Point(point.x + 5, point.y + 5), 3, 0.5, new Scalar(0, 0, 0)); // ImageUtils.saveImage(seedImage, "mask_" + k + "_seed" + imageForTextRecognition + ".png", request); if (!makeSingleRegion) { Mat element = new Mat(3, 3, CvType.CV_8U, new 
Scalar(1)); Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3); Imgproc.resize(mask, mask, original.size()); } // ImageUtils.saveImage(mask, "mask_" + k + "" + imageForTextRecognition + ".png", request); Mat dilatedMask = new Mat(); int elementSide = 21; Mat element = new Mat(elementSide, elementSide, CvType.CV_8U, new Scalar(1)); Imgproc.morphologyEx(mask, dilatedMask, Imgproc.MORPH_DILATE, element, new Point(-1, -1), 1); Imgproc.resize(dilatedMask, dilatedMask, original.size()); // ImageUtils.saveImage(dilatedMask, "mask_" + k + "_dilated" + imageForTextRecognition + ".png", request); maskRegions.add(mask); if (!makeSingleRegion) { int totalRemovedPoints = filterPoints(userPoints, dilatedMask); if (totalRemovedPoints > 0) { i = -1; // so that the algorithm starts again at the first element of the userPoints array } } else { filterPoints(userPoints, mask); } // System.out.println("Total points after filtering:"); // System.out.println(userPoints.size()); if (!makeSingleRegion) { mask = Mat.zeros(original.rows() + 2, original.cols() + 2, CvType.CV_8UC1); } k++; } ArrayList<FindingResponse> findingResponses = new ArrayList<>(); if (makeSingleRegion) { Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1)); Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3); Imgproc.resize(mask, mask, image.size()); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Imgproc.findContours(mask.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE); MatOfPoint biggestContour = contours.get(0); // getting the biggest contour double contourArea = Imgproc.contourArea(biggestContour); if (contours.size() > 1) { biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one } Point[] biggestContourPoints = biggestContour.toArray(); String path = "M " + (int) biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y + 
" "; for (int i = 1; i < biggestContourPoints.length; ++i) { Point v = biggestContourPoints[i]; path += "L " + (int) v.x + " " + (int) v.y + " "; } path += "Z"; // System.out.println("path:"); // System.out.println(path); Rect computedSearchWindow = Imgproc.boundingRect(biggestContour); Point massCenter = computedSearchWindow.tl(); Scalar meanColor = Core.mean(original, mask); // ImageUtils.saveImage(mask, "single_mask_" + imageForTextRecognition + ".png", request); FindingResponse findingResponse = new FindingResponse(path, meanColor, massCenter, -1, contourArea); findingResponses.add(findingResponse); } else { float imageArea = image.cols() * image.rows(); for (int j = 0; j < maskRegions.size(); j++) { Mat region = maskRegions.get(j); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Imgproc.findContours(region.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE); MatOfPoint biggestContour = contours.get(0); // getting the biggest contour if (contours.size() > 1) { biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one } double contourArea = Imgproc.contourArea(biggestContour); if (contourArea / imageArea < 0.8) { // only areas less than 80% of that of the image are accepted Point[] biggestContourPoints = biggestContour.toArray(); String path = "M " + (int) biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y + " "; for (int i = 1; i < biggestContourPoints.length; ++i) { Point v = biggestContourPoints[i]; path += "L " + (int) v.x + " " + (int) v.y + " "; } path += "Z"; Rect computedSearchWindow = Imgproc.boundingRect(biggestContour); Point massCenter = computedSearchWindow.tl(); // System.out.println("Contour area: " + contourArea); Mat contoursImage = userPointsImage.clone(); Imgproc.drawContours(contoursImage, contours, 0, newVal, 1); Scalar meanColor = Core.mean(original, region); FindingResponse findingResponse = new 
FindingResponse(path, meanColor, massCenter, -1, contourArea); findingResponses.add(findingResponse); // ImageUtils.saveImage(contoursImage, "mask_" + j + "_contourned" + imageForTextRecognition + ".png", request); } } } String jsonResponse = gson.toJson(findingResponses, ArrayList.class); out.println(jsonResponse); } }
From source file:servlets.processScribble.java
/** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> * methods.//from w w w . j a va2 s . co m * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { String imageForTextRecognition = request.getParameter("imageForTextRecognition") + ".png"; Mat original = ImageUtils.loadImage(imageForTextRecognition, request); Mat image = original.clone(); Mat mask = Mat.zeros(image.rows() + 2, image.cols() + 2, CvType.CV_8UC1); String samplingPoints = request.getParameter("samplingPoints"); Gson gson = new Gson(); Point[] userPoints = gson.fromJson(samplingPoints, Point[].class); MatOfPoint points = new MatOfPoint(new Mat(userPoints.length, 1, CvType.CV_32SC2)); int cont = 0; for (Point point : userPoints) { int y = (int) point.y; int x = (int) point.x; int[] data = { x, y }; points.put(cont++, 0, data); } MatOfInt hull = new MatOfInt(); Imgproc.convexHull(points, hull); MatOfPoint mopOut = new MatOfPoint(); mopOut.create((int) hull.size().height, 1, CvType.CV_32SC2); int totalPoints = (int) hull.size().height; Point[] convexHullPoints = new Point[totalPoints]; ArrayList<Point> seeds = new ArrayList<>(); for (int i = 0; i < totalPoints; i++) { int index = (int) hull.get(i, 0)[0]; double[] point = new double[] { points.get(index, 0)[0], points.get(index, 0)[1] }; mopOut.put(i, 0, point); convexHullPoints[i] = new Point(point[0], point[1]); seeds.add(new Point(point[0], point[1])); } MatOfPoint mop = new MatOfPoint(); mop.fromArray(convexHullPoints); ArrayList<MatOfPoint> arrayList = new ArrayList<MatOfPoint>(); arrayList.add(mop); Random random = new Random(); int b = random.nextInt(256); int g = 
random.nextInt(256); int r = random.nextInt(256); Scalar newVal = new Scalar(b, g, r); FloodFillFacade floodFillFacade = new FloodFillFacade(); for (int i = 0; i < seeds.size(); i++) { Point seed = seeds.get(i); image = floodFillFacade.fill(image, mask, (int) seed.x, (int) seed.y, newVal); } Imgproc.drawContours(image, arrayList, 0, newVal, -1); Imgproc.resize(mask, mask, image.size()); Scalar meanColor = Core.mean(original, mask); // Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\the_convexHull.png", image); ImageUtils.saveImage(image, imageForTextRecognition + "_the_convexHull.png", request); newVal = new Scalar(255, 255, 0); floodFillFacade.setMasked(false); System.out.println("Last one:"); floodFillFacade.fill(image, mask, 211, 194, newVal); Core.circle(image, new Point(211, 194), 5, new Scalar(0, 0, 0), -1); ImageUtils.saveImage(image, imageForTextRecognition + "_final.png", request); // Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\final.png", image); Mat element = new Mat(3, 3, CvType.CV_8U, new Scalar(1)); Imgproc.morphologyEx(mask, mask, Imgproc.MORPH_CLOSE, element, new Point(-1, -1), 3); Imgproc.resize(mask, mask, image.size()); // ImageUtils.saveImage(mask, "final_mask_dilated.png", request); // Highgui.imwrite("C:\\Users\\Gonzalo\\Documents\\NetBeansProjects\\iVoLVER\\uploads\\final_mask_dilated.png", mask); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Imgproc.findContours(mask.clone(), contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE); double contourArea = 0; String path = ""; MatOfPoint biggestContour = contours.get(0); // getting the biggest contour contourArea = Imgproc.contourArea(biggestContour); if (contours.size() > 1) { biggestContour = Collections.max(contours, new ContourComparator()); // getting the biggest contour in case there are more than one } Point[] biggestContourPoints = biggestContour.toArray(); path = "M " + (int) 
biggestContourPoints[0].x + " " + (int) biggestContourPoints[0].y + " "; for (int i = 1; i < biggestContourPoints.length; ++i) { Point v = biggestContourPoints[i]; path += "L " + (int) v.x + " " + (int) v.y + " "; } path += "Z"; System.out.println("path:"); System.out.println(path); Rect computedSearchWindow = Imgproc.boundingRect(biggestContour); Point massCenter = computedSearchWindow.tl(); FindingResponse findingResponse = new FindingResponse(path, meanColor, massCenter, -1, contourArea); String jsonResponse = gson.toJson(findingResponse, FindingResponse.class); out.println(jsonResponse); // String jsonResponse = gson.toJson(path); // out.println(jsonResponse); } }
From source file:simeav.filtros.instanciaciones.DetectorConectoresEstandar.java
/**
 * Detects the connectors (lines joining module boxes) in the diagram image,
 * registers each one on {@code diagrama} with its endpoint classification,
 * and returns a visualization image with the detected endpoints drawn.
 *
 * @param original       the original diagram image (only cloned/read).
 * @param mascaraModulos mask covering the module boxes.
 * @param diagrama       model object updated with connector data.
 * @return an image with connectors' endpoints and module boxes highlighted.
 */
@Override
public Mat detectarConectores(Mat original, Mat mascaraModulos, Diagrama diagrama) {
    // Remove the module squares, leaving only the connector strokes.
    Mat sinCuadrados = Utils.borrarMascara(original, mascaraModulos);
    // Dilate the connectors so they overlap the (removed) squares again.
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    // Erase small specks (area <= 50) left over from removing the squares.
    ArrayList<MatOfPoint> contornos = Utils.detectarContornos(sinCuadrados);
    for (int i = 0; i < contornos.size(); i++) {
        double area = Imgproc.contourArea(contornos.get(i));
        if (area <= 50) {
            Imgproc.drawContours(sinCuadrados, contornos, i, new Scalar(0, 0, 0), -1);
        }
    }
    this.extremos = original.clone();
    Mat mascara;
    String tipo_extremo1, tipo_extremo2;
    // Image on which the result is drawn.
    Mat conectores = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
    Mat contorno;
    contornos = Utils.detectarContornos(sinCuadrados);
    Mat intersec = new Mat();
    ArrayList<MatOfPoint> contornos_intersec;
    int r, g, b; // NOTE(review): declared but never used in this method.
    for (int j = contornos.size() - 1; j >= 0; j--) {
        // Draw the connector filled, to later intersect it with the module mask.
        contorno = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
        Imgproc.drawContours(contorno, contornos, j, new Scalar(180, 255, 255), -1);
        Imgproc.cvtColor(contorno, contorno, Imgproc.COLOR_BGR2GRAY);
        // Compute the intersection with the squares (drawn into intersec).
        Core.bitwise_and(contorno, mascaraModulos, intersec);
        // Contours of the intersections tell us where the endpoints are; a
        // connector must touch at least two modules to count.
        contornos_intersec = Utils.detectarContornos(intersec);
        if (contornos_intersec.size() > 1) {
            Scalar color = Utils.getColorRandom();
            for (int z = 0; z < contornos_intersec.size(); z++) {
                Imgproc.drawContours(conectores, contornos_intersec, z, color, -1);
            }
            ArrayList<Point> centros_extremos = Utils.getCentros(contornos_intersec);
            for (Point centros_extremo : centros_extremos) {
                Core.circle(conectores, centros_extremo, 4, color, -1);
            }
            analizarExtremos(j, centros_extremos, diagrama);
            Conector c = diagrama.getConector(j);
            // Highlight the two modules this connector joins.
            Core.rectangle(conectores, c.getModuloDesde().getRectangulo().tl(),
                    c.getModuloDesde().getRectangulo().br(), color, 3);
            Core.rectangle(conectores, c.getModuloHasta().getRectangulo().tl(),
                    c.getModuloHasta().getRectangulo().br(), color, 3);
            // 40x40 windows centered on each endpoint, used to classify the tip shape.
            Point tl_desde = new Point(c.getDesde().x - 20, c.getDesde().y - 20);
            Point br_desde = new Point(c.getDesde().x + 20, c.getDesde().y + 20);
            Point tl_hasta = new Point(c.getHasta().x - 20, c.getHasta().y - 20);
            Point br_hasta = new Point(c.getHasta().x + 20, c.getHasta().y + 20);
            // Classify the "desde" endpoint by masking out everything else.
            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_desde, br_desde, new Scalar(0, 0, 0), -1);
            tipo_extremo1 = clasificarExtremo(Utils.borrarMascara(original, mascara));
            // Classify the "hasta" endpoint the same way.
            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_hasta, br_hasta, new Scalar(0, 0, 0), -1);
            tipo_extremo2 = clasificarExtremo(Utils.borrarMascara(original, mascara));
            if (!tipo_extremo1.equals(tipo_extremo2)) {
                if (tipo_extremo1.equals("Normal"))
                    c.setTipo(tipo_extremo2);
                else if (tipo_extremo2.equals("Normal")) {
                    // The decorated tip is at the "desde" end: swap direction so the
                    // connector is stored pointing from "desde" to "hasta".
                    Modulo aux = c.getModuloDesde();
                    c.setDesde(c.getModuloHasta());
                    c.setHacia(aux);
                    // NOTE(review): getDesde() is read right after setDesde(Modulo);
                    // this relies on overloads keeping module and point state
                    // separate inside Conector — verify against that class.
                    Point p_aux = c.getDesde();
                    c.setDesde(c.getHasta());
                    c.setHasta(p_aux);
                    c.setTipo(tipo_extremo1);
                } else {
                    // Two different non-"Normal" tips: cannot decide a direction.
                    c.setTipo("Indeterminado");
                }
            } else {
                c.setTipo("Indeterminado");
            }
        }
    }
    return conectores;
}
From source file:simeav.filtros.instanciaciones.DetectorModulosEstandar.java
@Override public Mat detectarModulos(Mat original, Diagrama diagrama) { Imgproc.blur(original, original, new Size(15, 15)); original = Utils.dilate(original);//from www . java 2 s. c o m Mat jerarquia = new Mat(); ArrayList<MatOfPoint> contornos = new ArrayList<>(); Imgproc.findContours(original.clone(), contornos, jerarquia, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE); ArrayList<MatOfPoint> cp = new ArrayList<>(contornos.size()); Map<Integer, Rect> rectangulos = new HashMap<>(); Integer id_cuadrado = 0; Mat resultado = Mat.zeros(original.size(), CvType.CV_8U); for (int i = contornos.size() - 1; i >= 0; i--) { if (jerarquia.get(0, i)[3] > -1) { MatOfPoint2f contorno2f = new MatOfPoint2f(); contorno2f.fromList(contornos.get(i).toList()); MatOfPoint2f c = new MatOfPoint2f(); Imgproc.approxPolyDP(contorno2f, c, 3, true); cp.add(new MatOfPoint(c.toArray())); int lados = cp.get(cp.size() - 1).height(); if ((4 <= lados) && lados < 12) { rectangulos.put(id_cuadrado, Imgproc.boundingRect(new MatOfPoint(c.toArray()))); Point tl = new Point(rectangulos.get(id_cuadrado).tl().x - 20, rectangulos.get(id_cuadrado).tl().y - 20); Point br = new Point(rectangulos.get(id_cuadrado).br().x + 20, rectangulos.get(id_cuadrado).br().y + 20); Core.rectangle(resultado, tl, br, new Scalar(255, 255, 255), -1); diagrama.addModulo(id_cuadrado, new Rect(tl, br)); Imgproc.drawContours(resultado, contornos, i, new Scalar(0, 0, 0), -1); id_cuadrado++; } } } return resultado; }
From source file:simeav.filtros.instanciaciones.PreprocesadorEstandar.java
/**
 * Downscales large images to fit roughly within 800x600 while preserving the
 * aspect ratio; images of at most 1500x1200 are returned unchanged.
 *
 * @param original input image (not modified).
 * @return a resized copy, or {@code original} itself when small enough.
 */
private Mat redimensionar(Mat original) {
    Size orig = original.size();
    if (orig.height > 1200 && orig.width > 1500) {
        double tamX, tamY;
        double relX = orig.width / 800;
        double relY = orig.height / 600;
        // BUGFIX: "800 / 600" was integer division (== 1), so the branch compared
        // the aspect ratio against 1 instead of 4/3, picking the wrong scaling
        // axis for moderately wide images. Use floating-point literals.
        if (orig.width / orig.height > 800.0 / 600.0) {
            // Wider than 4:3 — pin the width to 800 and scale the height.
            tamX = 800;
            tamY = orig.height / relX;
        } else {
            // Taller than (or equal to) 4:3 — pin the height to 600.
            tamX = orig.width / relY;
            tamY = 600;
        }
        Mat resultado = new Mat();
        Imgproc.resize(original, resultado, new Size(tamX, tamY));
        return resultado;
    }
    return original;
}
From source file:simeav.filtros.instanciaciones.SeparadorTextoEstandar.java
/**
 * Splits {@code origin} into a text-free image ({@code imagenSinTexto}) and a
 * text-only image ({@code imagenTexto}): contours whose minimum enclosing
 * circle has radius <= {@code umbral_radio} are treated as text glyphs.
 *
 * @param origin       source image (cloned; not modified).
 * @param umbral_radio radius threshold separating glyph-sized contours from
 *                     larger diagram elements.
 */
@Override
public void separarTexto(Mat origin, int umbral_radio) {
    Mat original = origin.clone();
    imagenSinTexto = original.clone();
    // White canvas that will receive only the text strokes.
    imagenTexto = Mat.zeros(original.size(), CvType.CV_8U);
    imagenTexto = imagenTexto.setTo(new Scalar(255, 255, 255));

    Mat imGrises = new Mat();
    Mat bw = new Mat();
    Imgproc.cvtColor(original, imGrises, Imgproc.COLOR_BGR2GRAY);
    // NOTE(review): a 1x1 Gaussian kernel is effectively a no-op blur — confirm
    // whether a larger kernel was intended.
    Imgproc.GaussianBlur(imGrises, bw, new Size(1, 1), 0);
    Imgproc.threshold(bw, bw, 200, 250, Imgproc.THRESH_BINARY_INV);

    ArrayList<MatOfPoint> contornos = Utils.detectarContornos(bw);
    for (int i = 0; i < contornos.size(); i++) {
        MatOfPoint2f contorno2f = new MatOfPoint2f();
        contorno2f.fromList(contornos.get(i).toList());
        float[] radius = new float[1];
        Point centro = new Point();
        Imgproc.minEnclosingCircle(contorno2f, centro, radius);
        // CLEANUP: an unused "double a = Imgproc.contourArea(...)" was computed
        // here on every iteration and never read — removed.
        if (radius[0] <= umbral_radio) {
            // Glyph-sized contour: erase from the text-free image, stamp onto
            // the text-only image.
            Imgproc.drawContours(imagenSinTexto, contornos, i, new Scalar(255, 255, 255), -1);
            Imgproc.drawContours(imagenTexto, contornos, i, new Scalar(0, 0, 0), -1);
        }
    }
}
From source file:simeav.Utils.java
public static ArrayList<Point> detectarVertices(Mat original) { MatOfPoint corners = new MatOfPoint(); Imgproc.goodFeaturesToTrack(original, corners, 100, 0.01, 0, new Mat(), 2, false, 0.04); Mat vertices = Mat.zeros(original.size(), CvType.CV_8UC3); for (int i = 0; i < corners.height(); i++) { Core.circle(vertices, new Point(corners.get(i, 0)), 8, new Scalar(180, 170, 5), -1); }/* w w w. j a v a 2s . c om*/ Mat imGrises = new Mat(); Mat bw = new Mat(); Imgproc.cvtColor(vertices, imGrises, Imgproc.COLOR_BGR2GRAY); Imgproc.Canny(imGrises, bw, 100, 150, 5, true); Mat jerarquia = new Mat(); ArrayList<MatOfPoint> contornos = new ArrayList<>(); Imgproc.findContours(bw.clone(), contornos, jerarquia, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); ArrayList<Point> mc = Utils.getCentros(contornos); Mat resultado = Mat.zeros(original.size(), CvType.CV_8UC3); for (int i = 0; i < contornos.size(); i++) { Scalar color = new Scalar(180, 170, 5); // Imgproc.drawContours(resultado, contornos, i, color, 2, 8, jerarquia, 0, new Point()); Core.circle(resultado, mc.get(i), 4, color, -1, 8, 0); } return mc; }
From source file:src.model.filters.DotsFilter.java
/**
 * "Dots" filter: closes the input image with a small structuring element,
 * multiplies it by a grid of anti-aliased white circles to produce a
 * halftone-like dot pattern, writes the result next to the input file and
 * publishes the output image name to the client.
 *
 * @param request  must carry "name" (input image name); the image path is
 *                 resolved via savePath(request).
 * @param response receives the published output image.
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________DOTS_______________**");
    try {
        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________
        int elementSize = 2;
        int bsize = 10;
        Mat source = Imgcodecs.imread(savePath);
        Mat dst = zeros(source.size(), CV_8UC3);
        Mat cir = zeros(source.size(), CV_8UC1);
        // CLEANUP: an unused full-size "destination" Mat was allocated here — removed.
        Mat element = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT,
                new Size(elementSize * 3 + 1, elementSize * 3 + 1),
                new Point(elementSize, elementSize));

        // Stamp one filled white circle per bsize x bsize cell.
        for (int i = 0; i < source.rows(); i += bsize) {
            for (int j = 0; j < source.cols(); j += bsize) {
                circle(cir, new Point(j + bsize / (double) 2, i + bsize / (double) 2),
                        bsize / 2 - 1, new Scalar(255, 255, 255), -1, -1, Core.LINE_AA);
            }
        }

        Imgproc.morphologyEx(source, dst, Imgproc.MORPH_CLOSE, element);

        // Convert the circle stamp to a [0, 1] float mask.
        Mat cir_32f = new Mat(source.rows(), source.cols(), CV_32F);
        cir.convertTo(cir_32f, CV_32F);
        normalize(cir_32f, cir_32f, 0, 1, NORM_MINMAX);

        Mat dst_32f = new Mat(source.rows(), source.cols(), CV_32F);
        dst.convertTo(dst_32f, CV_32F);

        // Apply the mask channel by channel.
        Vector<Mat> channels = new Vector<>(); // parameterized (was a raw Vector)
        split(dst_32f, channels);
        System.out.println(channels.size());
        for (int i = 0; i < channels.size(); ++i) {
            channels.set(i, channels.get(i).mul(cir_32f));
        }
        merge(channels, dst_32f);
        dst_32f.convertTo(dst, CV_8U);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_DOTS_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_DOTS_temp.jpg";
        Imgcodecs.imwrite(output, dst);
        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);
        publishImg(response, imgOutput);
    } catch (Exception e) {
        // NOTE(review): broad catch keeps the servlet alive but hides failures.
        // BUGFIX: print the exception itself (class + message) instead of only
        // getMessage(), which is often null and loses the error type.
        System.out.println("Error: " + e);
    }
}