List of usage examples for org.opencv.core Mat zeros
public static Mat zeros(Size size, int type)
From source file:org.usfirst.frc.team2084.CMonster2016.vision.CoordinateMath.java
License:Open Source License
/**
 * Multiply two matrices: {@code dest = src1 * src2}.
 *
 * @param src1 the first matrix
 * @param src2 the second matrix
 * @param dest the output matrix receiving the product
 */
public static void matMult(Mat src1, Mat src2, Mat dest) {
    // gemm computes alpha*src1*src2 + beta*src3. With beta = 0 the src3 term
    // is ignored, so passing an empty Mat avoids allocating (and adding) a
    // zeros matrix on every call, which the original did with beta = 1.
    Core.gemm(src1, src2, 1, new Mat(), 0, dest);
}
From source file:saliency.saliency.java
/** * @param args the command line arguments *//* ww w . j av a 2 s . co m*/ public static void main(String[] args) { // TODO code application logic here System.loadLibrary(Core.NATIVE_LIBRARY_NAME); Mat input_img = imread("input_img/sea.jpg"); //fot temp test start Imgproc.resize(input_img, input_img, new Size(1980, 1080), 0, 0, Imgproc.INTER_LINEAR); //fot temp test end if (input_img.cols() == 0) { return; } //benchmark ///////////////////////step 1 : Extraction of Early Visual Deatures/////////////////////////////// //intensity image: intensity_img Mat intensity_img = new Mat(input_img.rows(), input_img.cols(), CV_16UC1); //intensity = (R+G+B)/3 int img_width = intensity_img.cols(); int img_height = intensity_img.rows(); int x, y; int i, c, s; int max_intensity = 0; for (x = 0; x < img_width; x++) { for (y = 0; y < img_height; y++) { int temp_intensity = ((int) input_img.get(y, x)[0] + (int) input_img.get(y, x)[1] + (int) input_img.get(y, x)[2]) / 3; intensity_img.put(y, x, temp_intensity); if (max_intensity < temp_intensity) { max_intensity = temp_intensity; } } } //create Guassian pyramid for intensity Mat[] i_gaussian_pyramid = new Mat[9]; i_gaussian_pyramid[0] = intensity_img.clone(); for (i = 0; i < 8; i++) { i_gaussian_pyramid[i + 1] = i_gaussian_pyramid[i].clone(); Imgproc.pyrDown(i_gaussian_pyramid[i + 1], i_gaussian_pyramid[i + 1], new Size()); } //create intensity feature map using center-surround differences Mat[][] intensity_feature_map = new Mat[3][2]; for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { intensity_feature_map[c][s] = center_surround.main(i_gaussian_pyramid[c + 2], i_gaussian_pyramid[s + c + 5], 0); } } //benchmark //imwrite("intensity_feature_map_00.bmp", intensity_feature_map[0][0]); //get normalized color image by I. 
Mat norm_input_img = input_img.clone(); norm_input_img.convertTo(norm_input_img, CV_64F); for (x = 0; x < img_width; x++) { for (y = 0; y < img_height; y++) { //normalization is only applied at the locations where I is larger than 1/10 of its maximum over entire image double[] temp = new double[3]; if (intensity_img.get(y, x)[0] > (max_intensity / 10)) { temp[0] = norm_input_img.get(y, x)[0] / intensity_img.get(y, x)[0]; temp[1] = norm_input_img.get(y, x)[1] / intensity_img.get(y, x)[0]; temp[2] = norm_input_img.get(y, x)[2] / intensity_img.get(y, x)[0]; norm_input_img.put(y, x, temp); } else { temp[0] = 0; temp[1] = 0; temp[2] = 0; norm_input_img.put(y, x, temp); } } } //get R G B Y(Yellow) single color channel images Mat r_img = new Mat(input_img.rows(), input_img.cols(), CV_64FC1); Mat g_img = new Mat(input_img.rows(), input_img.cols(), CV_64FC1); Mat b_img = new Mat(input_img.rows(), input_img.cols(), CV_64FC1); Mat y_img = new Mat(input_img.rows(), input_img.cols(), CV_64FC1); //[0]: b [1]:g [2]:r for (x = 0; x < img_width; x++) { for (y = 0; y < img_height; y++) { //R = min(0,r-(g+b)/2) double temp_chroma = max(0, (norm_input_img.get(y, x)[2] - (norm_input_img.get(y, x)[1] + norm_input_img.get(y, x)[0]) / 2)); r_img.put(y, x, temp_chroma); //G = max(0,g-(r+b)/2) temp_chroma = max(0, (norm_input_img.get(y, x)[1] - (norm_input_img.get(y, x)[2] + norm_input_img.get(y, x)[0]) / 2)); g_img.put(y, x, temp_chroma); //B = max(0,b-(r+g)/2) temp_chroma = max(0, (norm_input_img.get(y, x)[0] - (norm_input_img.get(y, x)[2] + norm_input_img.get(y, x)[1]) / 2)); b_img.put(y, x, temp_chroma); //Y = max(0,(r+g)/2-|r-g|/2-b) temp_chroma = max(0, ((norm_input_img.get(y, x)[2] + norm_input_img.get(y, x)[1]) / 2 - abs(norm_input_img.get(y, x)[2] + norm_input_img.get(y, x)[1]) / 2 - norm_input_img.get(y, x)[0])); y_img.put(y, x, temp_chroma); } } //create Gaussian pyramid for 4 color channels Mat[] b_gaussian_pyramid = new Mat[9]; b_gaussian_pyramid[0] = b_img.clone(); for (i = 0; 
i < 8; i++) { b_gaussian_pyramid[i + 1] = b_gaussian_pyramid[i].clone(); Imgproc.pyrDown(b_gaussian_pyramid[i + 1], b_gaussian_pyramid[i + 1], new Size()); } Mat[] g_gaussian_pyramid = new Mat[9]; g_gaussian_pyramid[0] = g_img.clone(); for (i = 0; i < 8; i++) { g_gaussian_pyramid[i + 1] = g_gaussian_pyramid[i].clone(); Imgproc.pyrDown(g_gaussian_pyramid[i + 1], g_gaussian_pyramid[i + 1], new Size()); } Mat[] r_gaussian_pyramid = new Mat[9]; r_gaussian_pyramid[0] = r_img.clone(); for (i = 0; i < 8; i++) { r_gaussian_pyramid[i + 1] = r_gaussian_pyramid[i].clone(); Imgproc.pyrDown(r_gaussian_pyramid[i + 1], r_gaussian_pyramid[i + 1], new Size()); } Mat[] y_gaussian_pyramid = new Mat[9]; y_gaussian_pyramid[0] = y_img.clone(); for (i = 0; i < 8; i++) { y_gaussian_pyramid[i + 1] = y_gaussian_pyramid[i].clone(); Imgproc.pyrDown(y_gaussian_pyramid[i + 1], y_gaussian_pyramid[i + 1], new Size()); } //create color feature map using center-surround differences //RG(c,s) = |(R(c)-G(c))(-)(G(c)-R(c))| Mat[][] rg_feature_map = new Mat[3][2]; for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat r_minus_g = r_gaussian_pyramid[c + 2].clone(); Core.subtract(r_gaussian_pyramid[c + 2], g_gaussian_pyramid[c + 2], r_minus_g); Mat g_minus_r = g_gaussian_pyramid[s + c + 5].clone(); Core.subtract(g_gaussian_pyramid[s + c + 5], r_gaussian_pyramid[s + c + 5], g_minus_r); rg_feature_map[c][s] = center_surround.main(r_minus_g, g_minus_r, 1); } } //BY(c,s) = |(B(c)-Y(c))(-)(Y(c)-B(c))| Mat[][] by_feature_map = new Mat[3][2]; for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat b_minus_g = b_gaussian_pyramid[c + 2].clone(); Core.subtract(b_gaussian_pyramid[c + 2], y_gaussian_pyramid[c + 2], b_minus_g); Mat y_minus_b = y_gaussian_pyramid[s + c + 5].clone(); Core.subtract(y_gaussian_pyramid[s + c + 5], b_gaussian_pyramid[s + c + 5], y_minus_b); by_feature_map[c][s] = center_surround.main(b_minus_g, y_minus_b, 1); } } //benchmark //create oriented Gabor pyramid from intensity image int 
kernel_size = 10;//31;//adjust value according to reference double sigma = 3;//default: = 0.56 . the larger , the support of the Gabor function and the number of visible parallel excitatory and inhibitory stripe zones increases. double[] theta = new double[4]; theta[0] = 0; theta[1] = Math.PI / 4; theta[2] = Math.PI / 2; theta[3] = Math.PI * 3 / 4; double lambda = 5;//36; minimum 3 double gamma = 0.5;//0.02; // double psi = 0; Mat[][] gabor_pyramid = new Mat[4][9]; int theta_index; for (theta_index = 0; theta_index < 4; theta_index++) { Mat gabor_kernel = Imgproc.getGaborKernel(new Size(kernel_size, kernel_size), sigma, theta[theta_index], lambda, gamma); //gabor_pyramid[theta_index][0] = intensity_img.clone(); for (i = 0; i < 9; i++) { //gabor_pyramid[theta_index][i] = gabor_pyramid[theta_index][i].clone(); gabor_pyramid[theta_index][i] = i_gaussian_pyramid[i].clone(); Imgproc.filter2D(i_gaussian_pyramid[i], gabor_pyramid[theta_index][i], -1, gabor_kernel); //Imgproc.resize(gabor_pyramid[theta_index][i], gabor_pyramid[theta_index][i], new Size(), 0.5, 0.5, Imgproc.INTER_AREA); } } //imwrite("gabor_pyramid_01.bmp", gabor_pyramid[0][1]); //imwrite("gabor_pyramid_11.bmp", gabor_pyramid[1][1]); //imwrite("gabor_pyramid_21.bmp", gabor_pyramid[2][1]); //imwrite("gabor_pyramid_31.bmp", gabor_pyramid[3][1]); //imwrite("gabor_pyramid_03.bmp", gabor_pyramid[0][3]); //get orientation feature map using center-surround differences Mat[][][] orientation_feature_map = new Mat[4][3][2]; for (theta_index = 0; theta_index < 4; theta_index++) { for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { orientation_feature_map[theta_index][c][s] = center_surround .main(gabor_pyramid[theta_index][c + 2], gabor_pyramid[theta_index][s + c + 5], 0); } } } //benchmark //imwrite("orientation_test_00.bmp", orientation_feature_map[0][0][0]); ///////////////////////step 2 : the saliency map/////////////////////////////// //get intensity conspicuity map Mat intensity_conspicuity_map = 
Mat.zeros(intensity_feature_map[2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(intensity_feature_map[c][s]); Mat resized_feature_map = Mat.zeros(intensity_feature_map[2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, intensity_feature_map[2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(intensity_conspicuity_map, 1, resized_feature_map, 1.0 / 6, 0, intensity_conspicuity_map); /*if(c == 0 && s == 0){ imwrite("in.bmp", intensity_feature_map[c][s]); imwrite("map_norm.bmp",norm_out); imwrite("resized_feature_map.bmp", resized_feature_map); }*/ } } //benchmark //Core.normalize(intensity_conspicuity_map, intensity_conspicuity_map, 0, 255, Core.NORM_MINMAX); //imwrite("intensity_conspicuity_map.bmp", intensity_conspicuity_map); //get color conspicuity map for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Core.normalize(rg_feature_map[c][s], rg_feature_map[c][s], 0, 255, Core.NORM_MINMAX); rg_feature_map[c][s].convertTo(rg_feature_map[c][s], CV_16UC1); Core.normalize(by_feature_map[c][s], by_feature_map[c][s], 0, 255, Core.NORM_MINMAX); by_feature_map[c][s].convertTo(by_feature_map[c][s], CV_16UC1); } } //imwrite("test_rg.bmp",rg_feature_map[0][0]); Mat color_conspicuity_map = Mat.zeros(rg_feature_map[2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(rg_feature_map[c][s]); Mat resized_feature_map = Mat.zeros(rg_feature_map[2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, rg_feature_map[2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(color_conspicuity_map, 1, resized_feature_map, 1.0 / 12, 0, color_conspicuity_map); norm_out = map_norm.main(by_feature_map[c][s]); resized_feature_map = Mat.zeros(by_feature_map[2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, by_feature_map[2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(color_conspicuity_map, 1, 
resized_feature_map, 1.0 / 12, 0, color_conspicuity_map); } } //benchmark //get orientation conspicuity map Mat orientation_conspicuity_map_0 = Mat.zeros(orientation_feature_map[0][2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(orientation_feature_map[0][c][s]); Mat resized_feature_map = Mat.zeros(orientation_feature_map[0][2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, orientation_feature_map[0][2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(orientation_conspicuity_map_0, 1, resized_feature_map, 1.0 / 6, 0, orientation_conspicuity_map_0); } } Mat orientation_conspicuity_map_1 = Mat.zeros(orientation_feature_map[1][2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(orientation_feature_map[1][c][s]); Mat resized_feature_map = Mat.zeros(orientation_feature_map[1][2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, orientation_feature_map[1][2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(orientation_conspicuity_map_1, 1, resized_feature_map, 1.0 / 6, 0, orientation_conspicuity_map_1); } } Mat orientation_conspicuity_map_2 = Mat.zeros(orientation_feature_map[2][2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(orientation_feature_map[2][c][s]); Mat resized_feature_map = Mat.zeros(orientation_feature_map[2][2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, orientation_feature_map[2][2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(orientation_conspicuity_map_2, 1, resized_feature_map, 1.0 / 6, 0, orientation_conspicuity_map_2); } } Mat orientation_conspicuity_map_3 = Mat.zeros(orientation_feature_map[3][2][0].size(), CV_16UC1); for (c = 0; c < 3; c++) { for (s = 0; s < 2; s++) { Mat norm_out = map_norm.main(orientation_feature_map[3][c][s]); Mat resized_feature_map = 
Mat.zeros(orientation_feature_map[3][2][0].size(), CV_16UC1); Imgproc.resize(norm_out, resized_feature_map, orientation_feature_map[3][2][0].size(), 0, 0, Imgproc.INTER_LINEAR); Core.addWeighted(orientation_conspicuity_map_3, 1, resized_feature_map, 1.0 / 6, 0, orientation_conspicuity_map_3); } } Mat orientation_conspicuity_map = Mat.zeros(orientation_feature_map[0][2][0].size(), CV_16UC1); Core.addWeighted(orientation_conspicuity_map, 1, map_norm.main(orientation_conspicuity_map_0), 1.0 / 4, 0, orientation_conspicuity_map); Core.addWeighted(orientation_conspicuity_map, 1, map_norm.main(orientation_conspicuity_map_1), 1.0 / 4, 0, orientation_conspicuity_map); Core.addWeighted(orientation_conspicuity_map, 1, map_norm.main(orientation_conspicuity_map_2), 1.0 / 4, 0, orientation_conspicuity_map); Core.addWeighted(orientation_conspicuity_map, 1, map_norm.main(orientation_conspicuity_map_3), 1.0 / 4, 0, orientation_conspicuity_map); //benchmark Mat saliency = Mat.zeros(intensity_conspicuity_map.size(), CV_16UC1); Core.addWeighted(saliency, 1, map_norm.main(intensity_conspicuity_map), 1.0 / 3, 0, saliency); Core.addWeighted(saliency, 1, map_norm.main(color_conspicuity_map), 1.0 / 3, 0, saliency); Core.addWeighted(saliency, 1, map_norm.main(orientation_conspicuity_map), 1.0 / 3, 0, saliency); //benchmark Core.normalize(saliency, saliency, 0, 255, Core.NORM_MINMAX); //fot temp test Imgproc.resize(saliency, saliency, new Size(720, 480), 0, 0, Imgproc.INTER_LINEAR); imwrite("saliency.bmp", saliency); Core.normalize(intensity_conspicuity_map, intensity_conspicuity_map, 0, 255, Core.NORM_MINMAX); Imgproc.resize(intensity_conspicuity_map, intensity_conspicuity_map, new Size(720, 480), 0, 0, Imgproc.INTER_LINEAR); imwrite("intensity_conspicuity_map.bmp", intensity_conspicuity_map); Core.normalize(color_conspicuity_map, color_conspicuity_map, 0, 255, Core.NORM_MINMAX); Imgproc.resize(color_conspicuity_map, color_conspicuity_map, new Size(720, 480), 0, 0, Imgproc.INTER_LINEAR); 
imwrite("color_conspicuity_map.bmp", color_conspicuity_map); Core.normalize(orientation_conspicuity_map, orientation_conspicuity_map, 0, 255, Core.NORM_MINMAX); Imgproc.resize(orientation_conspicuity_map, orientation_conspicuity_map, new Size(720, 480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation_conspicuity_map.bmp", orientation_conspicuity_map); Imgproc.resize(input_img, input_img, new Size(720, 480), 0, 0, Imgproc.INTER_LINEAR); imwrite("input_img.bmp", input_img); //for testing algorithm /* Mat temp1 = Mat.zeros(intensity_conspicuity_map.size(), CV_16UC1); temp1 = map_norm.main(intensity_conspicuity_map); Core.normalize(temp1, temp1, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp1, temp1, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("intensity.bmp", temp1); temp1 = map_norm.main(color_conspicuity_map); Core.normalize(temp1, temp1, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp1, temp1, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("color.bmp", temp1); temp1 = map_norm.main(orientation_conspicuity_map); Core.normalize(temp1, temp1, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp1, temp1, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation.bmp", temp1); Mat temp2 = Mat.zeros(orientation_conspicuity_map_0.size(), CV_16UC1); temp2 = map_norm.main(orientation_conspicuity_map_0); Core.normalize(temp2, temp2, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp2, temp2, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation_conspicuity_map_0.bmp", temp2); temp2 = map_norm.main(orientation_conspicuity_map_1); Core.normalize(temp2, temp2, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp2, temp2, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation_conspicuity_map_1.bmp", temp2); temp2 = map_norm.main(orientation_conspicuity_map_2); Core.normalize(temp2, temp2, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp2, temp2, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation_conspicuity_map_2.bmp", 
temp2); temp2 = map_norm.main(orientation_conspicuity_map_3); Core.normalize(temp2, temp2, 0, 255, Core.NORM_MINMAX); Imgproc.resize(temp2, temp2, new Size(720,480), 0, 0, Imgproc.INTER_LINEAR); imwrite("orientation_conspicuity_map_3.bmp", temp2); */ }
From source file:simeav.filtros.instanciaciones.DetectorConectoresEstandar.java
/**
 * Detects connectors between modules in the diagram image.
 * Erases the module squares, dilates the remaining strokes until they overlap
 * the squares again, and treats each contour that intersects more than one
 * square as a connector. Endpoint regions are classified to assign the
 * connector type, and everything is drawn into the returned color image.
 *
 * NOTE(review): Conector.setDesde appears overloaded for both Modulo and
 * Point arguments — confirm against the Conector class.
 *
 * @param original the source diagram image (not modified; clones are used)
 * @param mascaraModulos mask of the detected module squares
 * @param diagrama diagram model updated with connector endpoints and types
 * @return image with the detected connectors and their endpoints drawn
 */
@Override
public Mat detectarConectores(Mat original, Mat mascaraModulos, Diagrama diagrama) {
    Mat sinCuadrados = Utils.borrarMascara(original, mascaraModulos);
    // Dilate the connectors so they overlap the squares again.
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    sinCuadrados = Utils.dilate(sinCuadrados);
    // Remove small specks that may be left over from erasing the squares.
    ArrayList<MatOfPoint> contornos = Utils.detectarContornos(sinCuadrados);
    for (int i = 0; i < contornos.size(); i++) {
        double area = Imgproc.contourArea(contornos.get(i));
        if (area <= 50) {
            Imgproc.drawContours(sinCuadrados, contornos, i, new Scalar(0, 0, 0), -1);
        }
    }
    this.extremos = original.clone();
    Mat mascara;
    String tipo_extremo1, tipo_extremo2;
    // Image on which the result is drawn.
    Mat conectores = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
    Mat contorno;
    contornos = Utils.detectarContornos(sinCuadrados);
    Mat intersec = new Mat();
    ArrayList<MatOfPoint> contornos_intersec;
    int r, g, b;
    for (int j = contornos.size() - 1; j >= 0; j--) {
        // Draw the filled contour, then intersect it with the squares mask.
        contorno = Mat.zeros(sinCuadrados.size(), CvType.CV_8UC3);
        Imgproc.drawContours(contorno, contornos, j, new Scalar(180, 255, 255), -1);
        Imgproc.cvtColor(contorno, contorno, Imgproc.COLOR_BGR2GRAY);
        // Intersection with the squares is drawn into intersec.
        Core.bitwise_and(contorno, mascaraModulos, intersec);
        // Contours of the intersections locate the connector endpoints.
        contornos_intersec = Utils.detectarContornos(intersec);
        if (contornos_intersec.size() > 1) {
            // Touches at least two squares: this contour is a connector.
            Scalar color = Utils.getColorRandom();
            for (int z = 0; z < contornos_intersec.size(); z++) {
                Imgproc.drawContours(conectores, contornos_intersec, z, color, -1);
            }
            ArrayList<Point> centros_extremos = Utils.getCentros(contornos_intersec);
            for (Point centros_extremo : centros_extremos) {
                Core.circle(conectores, centros_extremo, 4, color, -1);
            }
            analizarExtremos(j, centros_extremos, diagrama);
            Conector c = diagrama.getConector(j);
            Core.rectangle(conectores, c.getModuloDesde().getRectangulo().tl(),
                    c.getModuloDesde().getRectangulo().br(), color, 3);
            Core.rectangle(conectores, c.getModuloHasta().getRectangulo().tl(),
                    c.getModuloHasta().getRectangulo().br(), color, 3);
            // 40x40 windows around each endpoint, used to classify the tip shape.
            Point tl_desde = new Point(c.getDesde().x - 20, c.getDesde().y - 20);
            Point br_desde = new Point(c.getDesde().x + 20, c.getDesde().y + 20);
            Point tl_hasta = new Point(c.getHasta().x - 20, c.getHasta().y - 20);
            Point br_hasta = new Point(c.getHasta().x + 20, c.getHasta().y + 20);
            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_desde, br_desde, new Scalar(0, 0, 0), -1);
            tipo_extremo1 = clasificarExtremo(Utils.borrarMascara(original, mascara));
            mascara = new Mat(sinCuadrados.size(), CvType.CV_8U, new Scalar(255, 255, 255));
            Core.rectangle(mascara, tl_hasta, br_hasta, new Scalar(0, 0, 0), -1);
            tipo_extremo2 = clasificarExtremo(Utils.borrarMascara(original, mascara));
            if (!tipo_extremo1.equals(tipo_extremo2)) {
                if (tipo_extremo1.equals("Normal"))
                    c.setTipo(tipo_extremo2);
                else if (tipo_extremo2.equals("Normal")) {
                    // Typed tip is on the "desde" side: swap direction so the
                    // plain end becomes the origin, then record the type.
                    Modulo aux = c.getModuloDesde();
                    c.setDesde(c.getModuloHasta());
                    c.setHacia(aux);
                    Point p_aux = c.getDesde();
                    c.setDesde(c.getHasta());
                    c.setHasta(p_aux);
                    c.setTipo(tipo_extremo1);
                } else {
                    // Two distinct non-Normal tips: cannot decide a direction.
                    c.setTipo("Indeterminado");
                }
            } else {
                c.setTipo("Indeterminado");
            }
        }
    }
    return conectores;
}
From source file:simeav.filtros.instanciaciones.DetectorModulosEstandar.java
/**
 * Detects the module boxes in the diagram image.
 * Blurs and dilates the input (the input Mat is modified by the blur), finds
 * contours, approximates each inner contour by a polygon, and keeps polygons
 * with 4 to 11 sides as modules. Each accepted module is registered on the
 * diagram with a 20-pixel margin and painted into the returned mask.
 *
 * @param original input image; blurred in place as a side effect
 * @param diagrama diagram model that receives the detected modules
 * @return CV_8U mask with the detected module rectangles filled white
 */
@Override
public Mat detectarModulos(Mat original, Diagrama diagrama) {
    // Smooth and thicken strokes before contour extraction.
    Imgproc.blur(original, original, new Size(15, 15));
    original = Utils.dilate(original);
    Mat hierarchy = new Mat();
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(original.clone(), contours, hierarchy, Imgproc.RETR_CCOMP,
            Imgproc.CHAIN_APPROX_SIMPLE);
    ArrayList<MatOfPoint> approxPolys = new ArrayList<>(contours.size());
    Map<Integer, Rect> boundingBoxes = new HashMap<>();
    Integer moduleId = 0;
    Mat mask = Mat.zeros(original.size(), CvType.CV_8U);
    for (int idx = contours.size() - 1; idx >= 0; idx--) {
        // Only contours with a parent (inner contours) are module candidates.
        if (hierarchy.get(0, idx)[3] > -1) {
            MatOfPoint2f contour2f = new MatOfPoint2f();
            contour2f.fromList(contours.get(idx).toList());
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(contour2f, approx, 3, true);
            approxPolys.add(new MatOfPoint(approx.toArray()));
            int sides = approxPolys.get(approxPolys.size() - 1).height();
            if ((4 <= sides) && sides < 12) {
                boundingBoxes.put(moduleId, Imgproc.boundingRect(new MatOfPoint(approx.toArray())));
                // Expand the bounding box by a 20-pixel margin on every side.
                Point tl = new Point(boundingBoxes.get(moduleId).tl().x - 20,
                        boundingBoxes.get(moduleId).tl().y - 20);
                Point br = new Point(boundingBoxes.get(moduleId).br().x + 20,
                        boundingBoxes.get(moduleId).br().y + 20);
                Core.rectangle(mask, tl, br, new Scalar(255, 255, 255), -1);
                diagrama.addModulo(moduleId, new Rect(tl, br));
                // Punch the original contour back out of the filled rectangle.
                Imgproc.drawContours(mask, contours, idx, new Scalar(0, 0, 0), -1);
                moduleId++;
            }
        }
    }
    return mask;
}
From source file:simeav.filtros.instanciaciones.SeparadorTextoEstandar.java
@Override public void separarTexto(Mat origin, int umbral_radio) { Mat original = origin.clone();//from w ww . ja v a 2s. co m imagenSinTexto = original.clone(); imagenTexto = Mat.zeros(original.size(), CvType.CV_8U); imagenTexto = imagenTexto.setTo(new Scalar(255, 255, 255)); Mat imGrises = new Mat(); Mat bw = new Mat(); Imgproc.cvtColor(original, imGrises, Imgproc.COLOR_BGR2GRAY); Imgproc.GaussianBlur(imGrises, bw, new Size(1, 1), 0); Imgproc.threshold(bw, bw, 200, 250, Imgproc.THRESH_BINARY_INV); ArrayList<MatOfPoint> contornos = Utils.detectarContornos(bw); for (int i = 0; i < contornos.size(); i++) { MatOfPoint2f contorno2f = new MatOfPoint2f(); contorno2f.fromList(contornos.get(i).toList()); float[] radius = new float[1]; Point centro = new Point(); Imgproc.minEnclosingCircle(contorno2f, centro, radius); double a = Imgproc.contourArea(contornos.get(i)); if (radius[0] <= umbral_radio) { Imgproc.drawContours(imagenSinTexto, contornos, i, new Scalar(255, 255, 255), -1); Imgproc.drawContours(imagenTexto, contornos, i, new Scalar(0, 0, 0), -1); } } }
From source file:simeav.Utils.java
public static ArrayList<Point> detectarVertices(Mat original) { MatOfPoint corners = new MatOfPoint(); Imgproc.goodFeaturesToTrack(original, corners, 100, 0.01, 0, new Mat(), 2, false, 0.04); Mat vertices = Mat.zeros(original.size(), CvType.CV_8UC3); for (int i = 0; i < corners.height(); i++) { Core.circle(vertices, new Point(corners.get(i, 0)), 8, new Scalar(180, 170, 5), -1); }//from www .j a va 2s. c o m Mat imGrises = new Mat(); Mat bw = new Mat(); Imgproc.cvtColor(vertices, imGrises, Imgproc.COLOR_BGR2GRAY); Imgproc.Canny(imGrises, bw, 100, 150, 5, true); Mat jerarquia = new Mat(); ArrayList<MatOfPoint> contornos = new ArrayList<>(); Imgproc.findContours(bw.clone(), contornos, jerarquia, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); ArrayList<Point> mc = Utils.getCentros(contornos); Mat resultado = Mat.zeros(original.size(), CvType.CV_8UC3); for (int i = 0; i < contornos.size(); i++) { Scalar color = new Scalar(180, 170, 5); // Imgproc.drawContours(resultado, contornos, i, color, 2, 8, jerarquia, 0, new Point()); Core.circle(resultado, mc.get(i), 4, color, -1, 8, 0); } return mc; }