List of usage examples for org.opencv.core Mat height
public int height()
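height() returns the number of rows in the matrix (it is the counterpart of width(), which returns the number of columns). Before the collected examples, here is a minimal sketch of the call in isolation; it is not taken from any of the source files below, it assumes the OpenCV native library is on the java.library.path, and "input.jpg" is a hypothetical file name.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatHeightExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat img = Imgcodecs.imread("input.jpg"); // hypothetical input file
        if (!img.empty()) {
            // height() reports the number of rows, width() the number of columns
            System.out.println("Loaded " + img.width() + "x" + img.height() + " image");
        }
    }
}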
From source file:ImagemScreenAtual.java
public static BufferedImage createBufferedImage(Mat mat) {
    BufferedImage image = new BufferedImage(mat.width(), mat.height(), BufferedImage.TYPE_3BYTE_BGR);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    byte[] data = dataBuffer.getData();
    mat.get(0, 0, data);
    return image;
}
From source file:frmMain.java
public static BufferedImage mat2Img(Mat in) {
    BufferedImage out;
    byte[] data = new byte[in.width() * in.height() * (int) in.elemSize()];
    int type;
    in.get(0, 0, data);
    if (in.channels() == 1)
        type = BufferedImage.TYPE_BYTE_GRAY;
    else
        type = BufferedImage.TYPE_3BYTE_BGR;
    out = new BufferedImage(in.width(), in.height(), type);
    out.getRaster().setDataElements(0, 0, in.width(), in.height(), data);
    return out;
}
From source file:TelaTakeFoto.java
/**
 * Method createBufferedImage
 * @param mat
 * @return
 */
public static BufferedImage createBufferedImage(Mat mat) {
    BufferedImage image = new BufferedImage(mat.width(), mat.height(), BufferedImage.TYPE_3BYTE_BGR);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    byte[] data = dataBuffer.getData();
    mat.get(0, 0, data);
    return image;
}
From source file:OCV__LoadLibrary.java
License:Open Source License
/**
 * a CV_8UC3 data of OpenCV -> a color data of ImageJ.
 * @param src_cv_8uc3 a CV_8UC3 data of OpenCV
 * @param dst_ar a color data of ImageJ (int[])
 * @param imw width of image
 * @param imh height of image
 */
public static void mat2intarray(Mat src_cv_8uc3, int[] dst_ar, int imw, int imh) {
    if ((src_cv_8uc3.width() != imw) || (src_cv_8uc3.height() != imh) || dst_ar.length != imw * imh) {
        IJ.error("Wrong image size");
    }

    for (int y = 0; y < imh; y++) {
        for (int x = 0; x < imw; x++) {
            byte[] dst_cv_8uc3_ele = new byte[3];
            src_cv_8uc3.get(y, x, dst_cv_8uc3_ele);

            // mask each channel to avoid sign extension of Java's signed bytes
            int b = dst_cv_8uc3_ele[0] & 0xff;
            int g = (dst_cv_8uc3_ele[1] & 0xff) << 8;
            int r = (dst_cv_8uc3_ele[2] & 0xff) << 16;
            int a = 0xff000000;
            dst_ar[x + imw * y] = b + g + r + a;
        }
    }
}
From source file:OCV__LoadLibrary.java
License:Open Source License
/**
 * a color data of ImageJ -> a CV_8UC3 data of OpenCV
 * @param src_ar a color data of ImageJ (int[])
 * @param dst_cv_8uc3 CV_8UC3 data of OpenCV
 * @param imw width of image
 * @param imh height of image
 */
public static void intarray2mat(int[] src_ar, Mat dst_cv_8uc3, int imw, int imh) {
    if ((dst_cv_8uc3.width() != imw) || (dst_cv_8uc3.height() != imh) || src_ar.length != imw * imh) {
        IJ.error("Wrong image size");
    }

    for (int y = 0; y < imh; y++) {
        for (int x = 0; x < imw; x++) {
            int ind = x + imw * y;
            byte b = (byte) (src_ar[ind] & 0xff);
            byte g = (byte) ((src_ar[ind] >> 8) & 0xff);
            byte r = (byte) ((src_ar[ind] >> 16) & 0xff);
            dst_cv_8uc3.put(y, x, new byte[] { b, g, r });
        }
    }
}
From source file:Face_Reco.java
public static void main(String args[]) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    VideoCapture camera = new VideoCapture(0);
    if (!camera.isOpened()) {
        System.out.println("Error");
    } else {
        Mat frame = new Mat();
        while (true) {
            if (camera.read(frame)) {
                System.out.println("Frame Obtained");
                System.out.println("Captured Frame Width " + frame.width() + " Height " + frame.height());
                Imgcodecs.imwrite("Camera.jpg", frame);
                // read back the frame that was just written (file name must match the one written above)
                Imgcodecs.imread("Camera.jpg");
                Imgcodecs.imread("Camera.jpg", Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
                System.out.println("Done!");
                break;
            }
        }
    }
    camera.release();
}
From source file:LicenseDetection.java
public void run() {
    // ------------------ set up tesseract for later use ------------------
    ITesseract tessInstance = new Tesseract();
    tessInstance.setDatapath("/Users/BradWilliams/Downloads/Tess4J");
    tessInstance.setLanguage("eng");

    // ------------------ Save image first ------------------
    Mat img;
    img = Imgcodecs.imread(getClass().getResource("/resources/car_2_shopped2.jpg").getPath());
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/True_Image.png", img);

    // ------------------ Convert to grayscale ------------------
    Mat imgGray = new Mat();
    Imgproc.cvtColor(img, imgGray, Imgproc.COLOR_BGR2GRAY);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/Gray.png", imgGray);

    // ------------------ Blur so edge detection won't pick up noise ------------------
    Mat imgGaussianBlur = new Mat();
    Imgproc.GaussianBlur(imgGray, imgGaussianBlur, new Size(3, 3), 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/gaussian_blur.png", imgGaussianBlur);

    // ****************** Create image that will be cropped at end of program before OCR ******************

    // ------------------ Binary threshold for OCR (used later) ------------------
    Mat imgThresholdOCR = new Mat();
    Imgproc.adaptiveThreshold(imgGaussianBlur, imgThresholdOCR, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 7, 10);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThresholdOCR.png", imgThresholdOCR);

    // ------------------ Erosion operation ------------------
    Mat kern = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat imgErodeOCR = new Mat();
    // Imgproc.MORPH_DILATE is performing erosion here, unexpectedly
    Imgproc.morphologyEx(imgThresholdOCR, imgErodeOCR, Imgproc.MORPH_DILATE, kern);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgErodeOCR.png", imgErodeOCR);

    // ------------------ Dilation operation ------------------
    Mat kernall = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    Mat imgDilateOCR = new Mat();
    Imgproc.morphologyEx(imgErodeOCR, imgDilateOCR, Imgproc.MORPH_ERODE, kernall);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgDilateOCR.png", imgDilateOCR);
    // ******************************************************************************************************

    // // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    // Mat k = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(3, 3));
    // Mat imgCloseOCR = new Mat();
    // Imgproc.morphologyEx(imgThresholdOCR,imgCloseOCR,1,k);
    // Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgCloseOCR.png", imgCloseOCR);

    // ------------------ Sobel vertical edge detection ------------------
    Mat imgSobel = new Mat();
    Imgproc.Sobel(imgGaussianBlur, imgSobel, -1, 1, 0);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgSobel.png", imgSobel);

    // ------------------ Binary threshold ------------------
    Mat imgThreshold = new Mat();
    Imgproc.adaptiveThreshold(imgSobel, imgThreshold, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 99, -60);
    //Imgproc.threshold(imgSobel,imgThreshold,120,255,Imgproc.THRESH_TOZERO);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgThreshold.png", imgThreshold);

    // // ------------------ Open operation (erosion followed by dilation) ------------------
    // Mat ker = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 2));
    // Mat imgOpen = new Mat();
    // Imgproc.morphologyEx(imgThreshold,imgOpen,0,ker);
    // Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgOpen.png", imgOpen);

    // ------------------ Close operation (dilation followed by erosion) to reduce noise ------------------
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT, new Size(22, 8));
    Mat imgClose = new Mat();
    Imgproc.morphologyEx(imgThreshold, imgClose, 1, kernel);
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgClose.png", imgClose);

    // ------------------ Find contours ------------------
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(imgClose, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // **************************** DEBUG CODE **************************
    Mat contourImg = new Mat(imgClose.size(), imgClose.type());
    for (int i = 0; i < contours.size(); i++) {
        Imgproc.drawContours(contourImg, contours, i, new Scalar(255, 255, 255), -1);
    }
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/contours.png", contourImg);
    // ******************************************************************

    // -------------- Convert contours --------------------
    // Convert to MatOfPoint2f so that minAreaRect can be called
    List<MatOfPoint2f> newContours = new ArrayList<>();
    for (MatOfPoint mat : contours) {
        MatOfPoint2f newPoint = new MatOfPoint2f(mat.toArray());
        newContours.add(newPoint);
    }

    // Get minAreaRects
    List<RotatedRect> minAreaRects = new ArrayList<>();
    for (MatOfPoint2f mat : newContours) {
        RotatedRect rect = Imgproc.minAreaRect(mat);

        /* --------------- BUG WORKAROUND ------------
           Possible bug: when converting from MatOfPoint2f to RotatedRect, the width and height
           were reversed and the angle was -90 degrees from what it would be if the width and
           height were correct. When painting the rectangle in the image the correct boxes were
           produced, but performing calculations on rect.angle, rect.width, or rect.height
           yielded unwanted results. The following workaround is buggy but works for my purpose. */
        if (rect.size.width < rect.size.height) {
            double temp;
            temp = rect.size.width;
            rect.size.width = rect.size.height;
            rect.size.height = temp;
            rect.angle = rect.angle + 90;
        }

        // check aspect ratio, area, and angle
        if (rect.size.width / rect.size.height > 1 && rect.size.width / rect.size.height < 5
                && rect.size.width * rect.size.height > 10000
                && rect.size.width * rect.size.height < 50000 && Math.abs(rect.angle) < 20) {
            minAreaRects.add(rect);
        }
        //minAreaRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    /* The following code draws the rectangles on top of the original image for debugging purposes */
    // Draw Rotated Rects
    Point[] vertices = new Point[4];
    Mat imageWithBoxes = img;
    // Draw color rectangles on top of binary contours
    // Mat imageWithBoxes = new Mat();
    // Mat temp = imgDilateOCR;
    // Imgproc.cvtColor(temp, imageWithBoxes, Imgproc.COLOR_GRAY2RGB);
    for (RotatedRect rect : minAreaRects) {
        rect.points(vertices);
        for (int i = 0; i < 4; i++) {
            Imgproc.line(imageWithBoxes, vertices[i], vertices[(i + 1) % 4], new Scalar(0, 0, 255), 2);
        }
    }
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/imgWithBoxes.png", imageWithBoxes);
    // ******************************************************************

    // **************************** DEBUG CODE **************************
    // for(RotatedRect rect : minAreaRects) {
    //     System.out.println(rect.toString());
    // }
    // ******************************************************************

    /* In order to rotate the image without cropping it:
       1. Create a new square image with dimension = diagonal of the initial image.
       2. Draw the initial image into the center of the new image, i.e. insert the initial image
          at the ROI (Region of Interest) in the new image.
       3. Rotate the new image. */

    // Find diagonal/hypotenuse
    int hypotenuse = (int) Math.sqrt((img.rows() * img.rows()) + (img.cols() * img.cols()));

    // New Mat with hypotenuse as height and width
    Mat rotateSpace = new Mat(hypotenuse, hypotenuse, 0);
    int ROI_x = (rotateSpace.width() - imgClose.width()) / 2; // x start of ROI
    int ROI_y = (rotateSpace.height() - imgClose.height()) / 2; // y start of ROI

    // designate region of interest
    Rect r = new Rect(ROI_x, ROI_y, imgClose.width(), imgClose.height());

    // Insert image into region of interest
    imgDilateOCR.copyTo(rotateSpace.submat(r));

    Mat rotatedTemp = new Mat(); // Mat to hold temporarily rotated mat
    Mat rectMat = new Mat(); // Mat to hold rect contents (needed for looping through pixels)
    Point[] rectVertices = new Point[4]; // Used to build rect to make ROI
    Rect rec = new Rect();
    List<RotatedRect> edgeDensityRects = new ArrayList<>(); // populated with rects that satisfy edge density
    int count = 0;

    // Loop through Rotated Rects and find edge density
    for (RotatedRect rect : minAreaRects) {
        count++;

        rect.center = new Point((float) ROI_x + rect.center.x, (float) ROI_y + rect.center.y);

        // rotate image to match orientation of rotated rect
        rotate(rotateSpace, rotatedTemp, rect.center, rect.angle);

        // remove rect rotation
        rect.angle = 0;

        // get vertices from rotatedRect
        rect.points(rectVertices);

        // **************************** DEBUG CODE **************************
        // for (int k = 0; k < 4; k++) {
        //     System.out.println(rectVertices[k]);
        //     Imgproc.line(rotatedTemp, rectVertices[k], rectVertices[(k + 1) % 4], new Scalar(0, 0, 255), 2);
        // }
        // Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotated" + count + ".png", rotatedTemp);
        // *****************************************************************

        // build rect to use as ROI
        rec = new Rect(rectVertices[1], rectVertices[3]);

        rectMat = rotatedTemp.submat(rec);

        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/extracted" + count + ".png", rectMat);

        // find edge density
        // // ------------------------ edge density check NOT IMPLEMENTED --------------------
        // /* Checking for edge density was not necessary for this image so it was not implemented
        //    due to lack of time */
        // for(int i = 0; i < rectMat.rows(); ++i){
        //     for(int j = 0; j < rectMat.cols(); ++j){
        //         // add up white pixels
        //     }
        // }
        // // check number of white pixels against total pixels
        // // only add rects to new arraylist that satisfy threshold

        edgeDensityRects.add(rect);
    }

    // **************************** DEBUG CODE **************************
    Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpace.png", rotateSpace);
    //Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rotatedSpaceROTATED.png", rotatedTemp);
    //System.out.println(imgGray.type());
    // *****************************************************************

    // if there is only one rectangle left, it's the license plate
    if (edgeDensityRects.size() == 1) {
        String result = ""; // Hold result from OCR
        BufferedImage bimg;
        Mat cropped;

        cropped = rectMat.submat(new Rect(20, 50, rectMat.width() - 40, rectMat.height() - 70));
        Imgcodecs.imwrite("/Users/BradWilliams/ComputerVisionOut/rectMatCropped.png", cropped);

        bimg = matToBufferedImage(cropped);
        BufferedImage image = bimg;

        try {
            result = tessInstance.doOCR(image);
        } catch (TesseractException e) {
            System.err.println(e.getMessage());
        }

        for (int i = 0; i < 10; ++i) {
        }

        result = result.replace("\n", "");
        System.out.println(result);

        CarProfDBImpl db = new CarProfDBImpl();
        db.connect("localhost:3306/computer_vision", "root", "*******");
        CarProf c = db.getCarProf(result);
        System.out.print(c.toString());
        db.close();
    }
}
From source file:Retrive.java
public void reterives(String path, int n) {
    Retrive r = new Retrive();
    Test1 ts = new Test1();
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Frame f = new Frame();
    FileDialog fd = new FileDialog(f, "input directory", FileDialog.LOAD);
    fd.setVisible(true);
    File directory = new File(fd.getDirectory());
    File[] list = directory.listFiles();
    int len = directory.listFiles().length;
    Mat[] img_corpse = new Mat[len];
    Mat[] histo = new Mat[len];
    for (int i = 0; i < len; i++) {
        img_corpse[i] = Highgui.imread(list[i].toString());
        //Imgproc.cvtColor(img_corpse[i],img_corpse[i], Imgproc.COLOR_RGB2GRAY);
        System.out.println(list[i]);
        //System.out.println(img_corpse[i].dump());
        histo[i] = r.histo(img_corpse[i]);
    }
    distanceofn nd = new distanceofn();
    Mat query = Highgui.imread(path);
    Imgproc.cvtColor(query, query, Imgproc.COLOR_RGB2GRAY);
    Double[] distance = new Double[len];
    Mat histquery = new Mat();
    histquery = r.query_histo(query);
    for (int i = 0; i < len; i++) {
        r.RGBtoGRAY(query, img_corpse[i]);
        r.Preprocess(query, img_corpse[i]);
        System.out.println("size of query " + query.width() + " " + query.height());
        System.out.println("size of datacorpus " + img_corpse[i].width() + " " + img_corpse[i].height());
        //Imshow im = new Imshow("title");
        //im.showImage(img_corpse[i]);
        distance[i] = r.Find_dist(histquery, histo[i]);
        //distance[i]=nd.ndistance(histquery, histo[i], 2);
    }
    for (int i = 0; i < len; i++) {
        System.out.println("distance of " + i + " " + distance[i]);
    }
    r.map(list, distance, len);
}
From source file:M.java
/**
 * Call the real-time camera and resize the image to the size of
 * WIDTH*HEIGHT. The resized image is stored in the folder "img_resized".
 *
 * @throws Exception
 */
public static String realtimeCamera() throws Exception {
    System.out.println("Camera is called!");
    String destPath = "";
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME); //or ... System.loadLibrary("opencv_java244");

    // make the JFrame
    JFrame frame = new JFrame("WebCam Capture - Face detection");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    FaceDetector fd = new FaceDetector();
    FacePanel facePanel = new FacePanel();
    frame.setSize(400, 400);
    frame.setBackground(Color.BLUE);
    frame.add(facePanel, BorderLayout.CENTER);
    // frame.setVisible(true);
    facePanel.setVisible(true);
    facePanel.validate();
    // Thread t = new Thread();

    // Open and read from the video stream
    Mat webcam_image = new Mat();
    VideoCapture webCam = new VideoCapture(0);
    if (webCam.isOpened()) {
        // Thread.sleep(500); /// This one-time delay allows the Webcam to initialize itself
        while (M.flag) {
            webCam.read(webcam_image);
            if (!webcam_image.empty()) {
                // Thread.sleep(200); /// This delay eases the computational load with little performance leakage
                System.out.println("CAMERA: " + Thread.currentThread());
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                // Apply the classifier to the captured image
                Mat temp = webcam_image;
                temp = fd.detect(webcam_image);

                // Display the image --------BUG
                facePanel.matToBufferedImage(temp);
                System.out.println("Image buffered!");
                facePanel.repaint();
                System.out.println("Panel repainted!");
                System.out.println(facePanel.isVisible());
                // System.out.println("visibility:"+facePanel.isVisible());//true
                // System.out.println("enabled?"+facePanel.isEnabled());//true
                // System.out.println("validity?"+facePanel.isValid());//true

                MatOfByte mb = new MatOfByte();
                Highgui.imencode(".jpg", webcam_image, mb);
                BufferedImage image = ImageIO.read(new ByteArrayInputStream(mb.toArray()));
                destPath = "build\\classes\\cam_img\\capture.jpg";
                File file = new File(destPath);
                ImageIO.write(image, "JPEG", file);
            } else {
                System.out.println(" --(!) No captured frame from webcam !");
                break;
            }
        }
    }
    webCam.release(); // release the webcam

    String imgPath = resize(destPath);
    flag = true;
    frame.dispose();
    return imgPath;
}
From source file:arlocros.Utils.java
License:Apache License
static public Mat tresholdContrastBlackWhite(Mat srcImage, int filterBlockSize, double subtractedConstant,
        boolean invertBlackWhiteColor) {
    final Mat transformMat = new Mat(1, 3, CvType.CV_32F);
    final int row = 0;
    final int col = 0;
    transformMat.put(row, col, 0.33, 0.33, 0.34);

    final Mat grayImage = new Mat(srcImage.height(), srcImage.width(), CvType.CV_8UC1);
    Core.transform(srcImage, grayImage, transformMat);

    Mat thresholdedImage = new Mat(grayImage.height(), grayImage.width(), CvType.CV_8UC1);
    Imgproc.adaptiveThreshold(grayImage, thresholdedImage, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, filterBlockSize, subtractedConstant);
    grayImage.release();

    if (invertBlackWhiteColor) {
        final Mat invertedImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC1);
        Core.bitwise_not(thresholdedImage, invertedImage);
        thresholdedImage.release();
        thresholdedImage = invertedImage;
    }

    final Mat coloredImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC3);
    Imgproc.cvtColor(thresholdedImage, coloredImage, Imgproc.COLOR_GRAY2RGB);
    thresholdedImage.release();

    return coloredImage;
}