List of usage examples for org.opencv.core.Mat.empty()
public boolean empty()
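Mat.empty() returns true when the matrix has no elements, typically because an image failed to load or a camera returned no frame, so the examples below all use it as a guard before processing. A minimal sketch of the pattern, assuming OpenCV 3.x where imread lives in org.opencv.imgcodecs.Imgcodecs (the Ko.java and M.java examples below use the OpenCV 2.x Highgui equivalent); the class name and file path here are placeholders:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatEmptyExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before any OpenCV call.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // imread() does not throw on failure; it returns an empty Mat,
        // so empty() is the standard check that the load succeeded.
        Mat image = Imgcodecs.imread("input.jpg"); // placeholder path
        if (image.empty()) {
            System.err.println("Could not load image.");
            return;
        }
        System.out.println("Loaded " + image.cols() + "x" + image.rows() + " image.");
    }
}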
From source file:OCV_CntrlUvcCamera.java
License:Open Source License
@Override
public void run(ImageProcessor arg0) {
    boolean bret = true;

    // ----- stop dialog during continuous grabbing -----
    diag_free = new JDialog(diag_free, title, false);
    JButton but_stop_cont = new JButton("Stop");

    but_stop_cont.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            flag_fin_loop = true;
            diag_free.dispose();
        }
    });

    diag_free.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) {
            flag_fin_loop = true;
        }
    });

    diag_free.add(but_stop_cont);
    diag_free.setSize(100, 75);
    // ----- end of stop dialog -----

    // initialize camera
    VideoCapture src_cap = new VideoCapture();
    Mat src_mat = new Mat();
    bret = src_cap.open(device);

    if (!bret) {
        IJ.error("Camera initialization failed.");
        diag_free.dispose();
        return;
    }

    src_cap.set(CV_CAP_PROP_FRAME_WIDTH, width);
    src_cap.set(CV_CAP_PROP_FRAME_HEIGHT, height);

    // Setting the image display window
    width = (int) src_cap.get(CV_CAP_PROP_FRAME_WIDTH);
    height = (int) src_cap.get(CV_CAP_PROP_FRAME_HEIGHT);
    ImagePlus impDsp = IJ.createImage(title, width, height, 1, 24);
    int[] impdsp_intarray = (int[]) impDsp.getChannelProcessor().getPixels();
    impDsp.show();
    impDsp.setRoi(0, 0, impDsp.getWidth(), impDsp.getHeight());

    // show stop dialog
    diag_free.setVisible(true);

    // run
    for (;;) {
        if (flag_fin_loop) {
            break;
        }

        // grab
        impDsp.startTiming();
        bret = src_cap.read(src_mat);
        IJ.showTime(impDsp, impDsp.getStartTime(), title + " : ");

        if (!bret) {
            IJ.error("Error occurred in grabbing.");
            diag_free.dispose();
            break;
        }

        if (src_mat.empty()) {
            IJ.error("Mat is empty.");
            diag_free.dispose();
            break;
        }

        // display
        if (src_mat.type() == CvType.CV_8UC3) {
            OCV__LoadLibrary.mat2intarray(src_mat, impdsp_intarray, width, height);
        } else {
            IJ.error("Color camera is supported only.");
            diag_free.dispose();
            break;
        }

        impDsp.draw();

        // wait
        wait(wait_time);
    }

    diag_free.dispose();

    if (src_cap.isOpened()) {
        src_cap.release();
    }
}
From source file:Ko.java
License:Open Source License
public static void main(String[] args) {
    String filename = args[0];
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat original = Highgui.imread(filename);

    if (original.empty()) {
        System.out.println("Mat not successfully loaded.");
        System.exit(1);
    }

    KoBoard board = new KoBoard(original);
    saveImage(filename, original);
}
From source file:M.java
/**
 * Call the real-time camera and resize the image to the size of
 * WIDTH*HEIGHT. The resized image is stored in the folder "img_resized".
 *
 * @throws Exception
 */
public static String realtimeCamera() throws Exception {
    System.out.println("Camera is called!");
    String destPath = "";
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // or ... System.loadLibrary("opencv_java244");

    // make the JFrame
    JFrame frame = new JFrame("WebCam Capture - Face detection");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    FaceDetector fd = new FaceDetector();
    FacePanel facePanel = new FacePanel();
    frame.setSize(400, 400);
    frame.setBackground(Color.BLUE);
    frame.add(facePanel, BorderLayout.CENTER);
    // frame.setVisible(true);
    facePanel.setVisible(true);
    facePanel.validate();
    // Thread t = new Thread();

    // Open and read from the video stream
    Mat webcam_image = new Mat();
    VideoCapture webCam = new VideoCapture(0);

    if (webCam.isOpened()) {
        // Thread.sleep(500); /// This one-time delay allows the Webcam to initialize itself
        while (M.flag) {
            webCam.read(webcam_image);
            if (!webcam_image.empty()) {
                // Thread.sleep(200); /// This delay eases the computational load .. with little performance leakage
                System.out.println("CAMERA: " + Thread.currentThread());
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                // Apply the classifier to the captured image
                Mat temp = webcam_image;
                temp = fd.detect(webcam_image);

                // Display the image --------BUG
                facePanel.matToBufferedImage(temp);
                System.out.println("Image buffered!");
                facePanel.repaint();
                System.out.println("Panel repainted!");
                System.out.println(facePanel.isVisible());
                // System.out.println("visibility:"+facePanel.isVisible());//true
                // System.out.println("enabled?"+facePanel.isEnabled());//true
                // System.out.println("validity?"+facePanel.isValid());//true

                MatOfByte mb = new MatOfByte();
                Highgui.imencode(".jpg", webcam_image, mb);
                BufferedImage image = ImageIO.read(new ByteArrayInputStream(mb.toArray()));
                destPath = "build\\classes\\cam_img\\capture.jpg";
                File file = new File(destPath);
                ImageIO.write(image, "JPEG", file);
            } else {
                System.out.println(" --(!) No captured frame from webcam !");
                break;
            }
        }
    }

    webCam.release(); // release the webcam
    String imgPath = resize(destPath);
    flag = true;
    frame.dispose();
    return imgPath;
}
From source file:ac.robinson.ticqr.TickBoxImageParserTask.java
License:Apache License
@Override
protected ArrayList<PointF> doInBackground(Void... unused) {
    Log.d(TAG, "Searching for tick boxes of " + mBoxSize + " size");

    // we look for *un-ticked* boxes, rather than ticked, as they are uniform in appearance (and hence easier to
    // detect) - they show up as a box within a box
    ArrayList<PointF> centrePoints = new ArrayList<>();
    int minimumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize, 2));
    int maximumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize * 1.35f, 2));
    int minimumInnerBoxArea = (int) Math.round(Math.pow(mBoxSize * 0.5f, 2));

    // image adjustment - blurSize, blurSTDev and adaptiveThresholdSize must not be even numbers
    int blurSize = 9;
    int blurSTDev = 3;
    int adaptiveThresholdSize = Math.round(mBoxSize * 3); // (oddness ensured below)
    int adaptiveThresholdC = 4; // value to add to the mean (can be negative or zero)
    adaptiveThresholdSize = adaptiveThresholdSize % 2 == 0 ? adaptiveThresholdSize + 1 : adaptiveThresholdSize;

    // how similar the recognised polygon must be to its actual contour - lower is more similar
    float outerPolygonSimilarity = 0.045f;
    float innerPolygonSimilarity = 0.075f; // don't require as much accuracy for the inner part of the tick box

    // how large the maximum internal angle can be (e.g., for checking square shape)
    float maxOuterAngleCos = 0.3f;
    float maxInnerAngleCos = 0.4f;

    // use OpenCV to recognise boxes that have a box inside them - i.e. an un-ticked tick box
    // see: http://stackoverflow.com/a/11427501
    // Bitmap newBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true); // not needed
    Mat bitMat = new Mat();
    Utils.bitmapToMat(mBitmap, bitMat);

    // blur and convert to grey
    // alternative (less flexible): Imgproc.medianBlur(bitMat, bitMat, blurSize);
    Imgproc.GaussianBlur(bitMat, bitMat, new Size(blurSize, blurSize), blurSTDev, blurSTDev);
    Imgproc.cvtColor(bitMat, bitMat, Imgproc.COLOR_RGB2GRAY); // need 8uC1 (1 channel, unsigned char) image type

    // perform adaptive thresholding to detect edges
    // alternative (slower): Imgproc.Canny(bitMat, bitMat, 10, 20, 3, false);
    Imgproc.adaptiveThreshold(bitMat, bitMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            adaptiveThresholdSize, adaptiveThresholdC);

    // get the contours in the image, and their hierarchy
    Mat hierarchyMat = new Mat();
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(bitMat, contours, hierarchyMat, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    if (DEBUG) {
        Imgproc.drawContours(bitMat, contours, -1, new Scalar(30, 255, 255), 1);
    }

    // parse the contours and look for a box containing another box, with similar enough sizes
    int numContours = contours.size();
    ArrayList<Integer> searchedContours = new ArrayList<>();
    Log.d(TAG, "Found " + numContours + " possible tick box areas");
    if (numContours > 0 && !hierarchyMat.empty()) {
        for (int i = 0; i < numContours; i++) {

            // the original detected contour
            MatOfPoint boxPoints = contours.get(i);

            // hierarchy key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
            int childBox = (int) hierarchyMat.get(0, i)[2]; // usually the largest child (as we're doing RETR_TREE)
            if (childBox == -1) { // we only want elements that have children
                continue;
            } else {
                if (searchedContours.contains(childBox)) {
                    if (DEBUG) {
                        Log.d(TAG, "Ignoring duplicate box at first stage: " + childBox);
                    }
                    continue;
                } else {
                    searchedContours.add(childBox);
                }
            }

            // discard smaller (i.e. noise) outer box areas as soon as possible for speed
            // used to do Imgproc.isContourConvex(outerPoints) later, but the angle check covers this, so no need
            double originalArea = Math.abs(Imgproc.contourArea(boxPoints));
            if (originalArea < minimumOuterBoxArea) {
                // if (DEBUG) {
                //     drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                //     Log.d(TAG, "Outer box too small");
                // }
                continue;
            }
            if (originalArea > maximumOuterBoxArea) {
                // if (DEBUG) {
                //     drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                //     Log.d(TAG, "Outer box too big");
                // }
                continue;
            }

            // simplify the contours of the outer box - we want to detect four-sided shapes only
            MatOfPoint2f boxPoints2f = new MatOfPoint2f(boxPoints.toArray()); // Point2f for approxPolyDP
            Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                    outerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true); // simplify the contour
            if (boxPoints2f.height() != 4) { // height is number of points
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer box not 4 points");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, angle-wise
            org.opencv.core.Point[] boxPointsArray = boxPoints2f.toArray();
            double maxCosine = 0;
            for (int j = 0; j < 4; j++) {
                org.opencv.core.Point pL = boxPointsArray[j];
                org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
            }
            if (maxCosine > maxOuterAngleCos) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer angles not square enough");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, line length-wise
            double minLine = Double.MAX_VALUE;
            double maxLine = 0;
            for (int p = 1; p < 4; p++) {
                org.opencv.core.Point p1 = boxPointsArray[p - 1];
                org.opencv.core.Point p2 = boxPointsArray[p];
                double xd = p1.x - p2.x;
                double yd = p1.y - p2.y;
                double lineLength = Math.sqrt((xd * xd) + (yd * yd));
                minLine = Math.min(minLine, lineLength);
                maxLine = Math.max(maxLine, lineLength);
            }
            if (maxLine - minLine > minLine) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer lines not square enough");
                }
                continue;
            }

            // draw the outer box if debugging
            if (DEBUG) {
                MatOfPoint debugBoxPoints = new MatOfPoint(boxPointsArray);
                Log.d(TAG, "Potential tick box: " + boxPoints2f.size() + ", area: "
                        + Math.abs(Imgproc.contourArea(debugBoxPoints)) + " (min:" + minimumOuterBoxArea
                        + ", max:" + maximumOuterBoxArea + ")");
                drawPoints(bitMat, debugBoxPoints, new Scalar(50, 255, 255), 2);
            }

            // loop through the children - they should be in descending size order, but sometimes this is wrong
            boolean wrongBox = false;
            while (true) {
                if (DEBUG) {
                    Log.d(TAG, "Looping with box: " + childBox);
                }

                // we've previously tried a child - try the next one
                // key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
                if (wrongBox) {
                    childBox = (int) hierarchyMat.get(0, childBox)[0];
                    if (childBox == -1) {
                        break;
                    }
                    if (searchedContours.contains(childBox)) {
                        if (DEBUG) {
                            Log.d(TAG, "Ignoring duplicate box at loop stage: " + childBox);
                        }
                        break;
                    } else {
                        searchedContours.add(childBox);
                    }
                    //noinspection UnusedAssignment
                    wrongBox = false;
                }

                // perhaps this is the outer box - check its child has no children itself
                // (removed so tiny children (i.e. noise) don't mean we mis-detect an un-ticked box as ticked)
                // if (hierarchyMat.get(0, childBox)[2] != -1) {
                //     continue;
                // }

                // check the size of the child box is large enough
                boxPoints = contours.get(childBox);
                originalArea = Math.abs(Imgproc.contourArea(boxPoints));
                if (originalArea < minimumInnerBoxArea) {
                    if (DEBUG) {
                        // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                        Log.d(TAG, "Inner box too small");
                    }
                    wrongBox = true;
                    continue;
                }

                // simplify the contours of the inner box - again, we want four-sided shapes only
                boxPoints2f = new MatOfPoint2f(boxPoints.toArray());
                Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                        innerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true);
                if (boxPoints2f.height() != 4) { // height is number of points
                    // if (DEBUG) {
                    //     drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                    // }
                    Log.d(TAG, "Inner box fewer than 4 points"); // TODO: allow > 4 for low quality images?
                    wrongBox = true;
                    continue;
                }

                // check that the simplified inner box is approximately a square, angle-wise
                // higher tolerance because noise means if we get several inners, the box may not be quite square
                boxPointsArray = boxPoints2f.toArray();
                maxCosine = 0;
                for (int j = 0; j < 4; j++) {
                    org.opencv.core.Point pL = boxPointsArray[j];
                    org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                    org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                    maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
                }
                if (maxCosine > maxInnerAngleCos) {
                    Log.d(TAG, "Inner angles not square enough");
                    wrongBox = true;
                    continue;
                }

                // this is probably an inner box - log if debugging
                if (DEBUG) {
                    Log.d(TAG, "Un-ticked inner box: " + boxPoints2f.size() + ", area: "
                            + Math.abs(Imgproc.contourArea(new MatOfPoint2f(boxPointsArray)))
                            + " (min: " + minimumInnerBoxArea + ")");
                }

                // find the inner box centre
                double centreX = (boxPointsArray[0].x + boxPointsArray[1].x + boxPointsArray[2].x
                        + boxPointsArray[3].x) / 4f;
                double centreY = (boxPointsArray[0].y + boxPointsArray[1].y + boxPointsArray[2].y
                        + boxPointsArray[3].y) / 4f;

                // draw the inner box if debugging
                if (DEBUG) {
                    drawPoints(bitMat, new MatOfPoint(boxPointsArray), new Scalar(255, 255, 255), 1);
                    Core.circle(bitMat, new org.opencv.core.Point(centreX, centreY), 3, new Scalar(255, 255, 255));
                }

                // add to the list of boxes to check
                centrePoints.add(new PointF((float) centreX, (float) centreY));
                break;
            }
        }
    }

    Log.d(TAG, "Found " + centrePoints.size() + " un-ticked boxes");
    return centrePoints;
}
From source file:acseg.reconocimiento.matriculas.FXMLReconocimientoMatriculasController.java
/**
 * Get a frame from the opened video stream (if any).
 *
 * @return the {@link Mat} to show
 */
private Mat grabFrame() {
    // initialise the frame
    Mat frame = new Mat();

    // check whether the camera is open for grabbing
    if (this.capture.isOpened()) {
        try {
            // read the current frame
            this.capture.read(frame);

            // if the frame is not empty, process it
            if (!frame.empty()) {
                fm = frame;
            }
        } catch (Exception e) {
            System.err.println("Exception during the image elaboration: " + e);
        }
    }

    return frame;
}
From source file:br.cefetmg.lsi.opencv.multipleObjectTracking.processing.MultipleObjectTracking.java
License:Open Source License
public void startTracking() throws Exception {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    mountFrames();

    // Matrices for image processing.
    Mat image = new Mat();
    Mat thresholdedImage = new Mat();
    Mat hsvImage = new Mat();

    // Opens camera capture flow.
    VideoCapture capture = null;
    String imagesource = PropertiesLoaderImpl.getValor("multipleObjectTracking.imagesource");
    if (imagesource.equalsIgnoreCase("webcam")) {
        capture = new VideoCapture(0);
    } else {
        if (imagesource.equalsIgnoreCase("ipcam")) {
            String ipcamAddress = PropertiesLoaderImpl
                    .getValor("multipleObjectTracking.imagesource.ipcam.address");
            capture = new VideoCapture(ipcamAddress);
        }
    }

    if (capture == null) {
        throw new Exception("Could not connect to camera.");
    }

    // Captures one image, for starting the process.
    try {
        capture.read(image);
    } catch (Exception e) {
        throw new Exception("Could not read from camera. Maybe the URL is not correct.");
    }

    setFramesSizes(image);

    if (capture.isOpened()) {
        while (true) {
            capture.read(image);

            if (!image.empty()) {
                Imgproc.cvtColor(image, hsvImage, Imgproc.COLOR_BGR2HSV);

                if (calibrationMode) {
                    thresholdedImage = processImage(hsvImage,
                            new Scalar(calibrationWindow.getMinHValue(), calibrationWindow.getMinSValue(),
                                    calibrationWindow.getMinVValue()),
                            new Scalar(calibrationWindow.getMaxHValue(), calibrationWindow.getMaxSValue(),
                                    calibrationWindow.getMaxVValue()));
                    trackFilteredObject(null, thresholdedImage, image);
                    updateFrames(image, thresholdedImage);
                } else {
                    Ball redBall = new Ball(Ball.Colours.RED);
                    Ball greenBall = new Ball(Ball.Colours.GREEN);
                    Ball blueBall = new Ball(Ball.Colours.BLUE);

                    ArrayList<Ball> balls = new ArrayList<Ball>();
                    balls.add(redBall);
                    balls.add(greenBall);
                    balls.add(blueBall);

                    for (Ball ball : balls) {
                        thresholdedImage = processImage(hsvImage, ball.getHsvMin(), ball.getHsvMax());
                        trackFilteredObject(ball, thresholdedImage, image);
                        updateFrames(image, thresholdedImage);
                    }
                }
            } else {
                throw new Exception("Could not read camera image.");
            }
        }
    } else {
        throw new Exception("Could not read from camera.");
    }
}
From source file:by.zuyeu.deyestracker.core.detection.DemoPanel.java
public static void main(String arg[]) throws DEyesTrackerException, InterruptedException, ExecutionException {
    LOG.info("main - start;");
    final String windowName = "Capture - Face detection";
    final JFrame frame = new JFrame(windowName);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    final DemoPanel demoPanel = new DemoPanel();
    frame.setContentPane(demoPanel);
    frame.setVisible(true);

    //-- 2. Read the video stream
    final FaceInfoSampler sampler = new FaceInfoSampler();
    final IFrameCapture capture = sampler.getCapture();
    final Scalar faceRegionColor = new Scalar(0, 255, 0);
    final Scalar eyesRegionColor = new Scalar(120, 120, 120);

    final ExecutorService executorService = Executors.newSingleThreadExecutor();
    FutureTask<DetectFaceSample> detectFaceTask = TaskUtils.wrapFutureAnd(new DetectTask(sampler),
            executorService);
    DetectFaceSample sample = new DetectFaceSample();

    while (true) {
        final Mat webcamImage = capture.getNextFrame();
        if (webcamImage != null && !webcamImage.empty()) {
            frame.setSize(webcamImage.width() + 40, webcamImage.height() + 60);

            if (detectFaceTask.isDone()) {
                sample = detectFaceTask.get();
                detectFaceTask = TaskUtils.wrapFutureAnd(new DetectTask(sampler), executorService);
            }

            if (sample.getFace() != null) {
                addRectangleToImage(sample.getFace(), webcamImage, faceRegionColor);
            }
            if (sample.getLeftEye() != null) {
                addRectangleToImage(sample.getLeftEye(), webcamImage, eyesRegionColor);
            }
            if (sample.getRightEye() != null) {
                addRectangleToImage(sample.getRightEye(), webcamImage, eyesRegionColor);
            }
            if (sample.getLeftPupil() != null) {
                drawCircle(webcamImage, sample.getLeftPupil());
            }
            if (sample.getRightPupil() != null) {
                drawCircle(webcamImage, sample.getRightPupil());
            }

            //-- 4. Display the image
            demoPanel.convertMatToBufferedImage(webcamImage); // We could look at the error...
            demoPanel.repaint();
        }
    }
}
From source file:by.zuyeu.deyestracker.core.video.capture.CameraFrameCapture.java
private void startCapturing() {
    LOG.trace("startCapturing() - start;");

    while (!isCanceled) {
        final Mat webcamImage = new Mat();
        capture.read(webcamImage);
        if (!webcamImage.empty()) {
            safeAddCapture(webcamImage);
        }
    }

    LOG.trace("startCapturing() - end;");
}
From source file:carmelo.CameraTask.java
@Override
protected Image call() throws Exception {
    // skip if the camera is not open
    if (!capture.isOpened()) {
        return null;
    }

    // grab the camera capture, storing it in the frame
    Mat frame = new Mat();
    capture.read(frame);

    // check whether the capture is valid
    if (!frame.empty()) {
        // process and convert the image
        Mat dst = imgproc.apply(frame);
        return createImageFromMat(dst);
    }

    return null;
}
From source file:cctvanalization.FXMLDocumentController.java
private Image grabFrame() {
    if (applicationShouldClose) {
        if (videoCapture.isOpened()) {
            videoCapture.release();
        }
        scheduledExecutorService.shutdown();
    }

    Image imageToShow = null;
    Mat frame = new Mat();
    // Mat prevFrame = new Mat(grabbedImagesPrev.get(grabbedImagesPrev.size() - 1));
    int frameNum = 0;

    if (videoCapture.isOpened()) {
        try {
            videoCapture.read(frame);

            if (!frame.empty()) {
                Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2GRAY);
                MatOfByte buffer = new MatOfByte();
                Imgcodecs.imencode(".png", frame, buffer);
                imageToShow = new Image(new ByteArrayInputStream(buffer.toArray()));

                // keep a rolling window of the last 10 grabbed frames
                grabbedFramesTemp.removeAll(grabbedFramesTemp);
                if (grabbedFramesPrev.size() < 10) {
                    grabbedFramesPrev.add(frame);
                } else {
                    for (int i = 1; i < grabbedFramesPrev.size(); i++) {
                        grabbedFramesTemp.add(grabbedFramesPrev.get(i));
                    }
                    grabbedFramesPrev.removeAll(grabbedFramesPrev);
                    for (int i = 0; i < grabbedFramesTemp.size(); i++) {
                        grabbedFramesPrev.add(grabbedFramesTemp.get(i));
                    }
                    grabbedFramesPrev.add(frame);
                }
            }
        } catch (Exception e) {
            System.err.println(e);
        }
    }

    return imageToShow;
}