List of usage examples for org.opencv.videoio VideoCapture isOpened
public boolean isOpened()
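isOpened() returns true when the VideoCapture has been successfully initialized, so it is the standard check before reading frames, as every example below shows. A minimal, self-contained sketch of that pattern (the class name and camera index 0 are illustrative assumptions, not taken from any example below):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;

public class IsOpenedSketch {
    public static void main(String[] args) {
        // Load the OpenCV native library before using any OpenCV class.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Camera index 0 is an assumption; use the index of your device.
        VideoCapture capture = new VideoCapture(0);
        if (!capture.isOpened()) {
            System.err.println("Camera could not be opened.");
            return;
        }

        Mat frame = new Mat();
        if (capture.read(frame)) {
            System.out.println("Grabbed a " + frame.width() + "x" + frame.height() + " frame.");
        }

        // Release the device when finished.
        capture.release();
    }
}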
From source file:OCV_CntrlUvcCamera.java
License:Open Source License
@Override
public void run(ImageProcessor arg0) {
    boolean bret = true;

    // ----- stop dialog during continuous grabbing -----
    diag_free = new JDialog(diag_free, title, false);
    JButton but_stop_cont = new JButton("Stop");

    but_stop_cont.addMouseListener(new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            flag_fin_loop = true;
            diag_free.dispose();
        }
    });

    diag_free.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) {
            flag_fin_loop = true;
        }
    });

    diag_free.add(but_stop_cont);
    diag_free.setSize(100, 75);
    // ----- end of stop dialog -----

    // initialize camera
    VideoCapture src_cap = new VideoCapture();
    Mat src_mat = new Mat();
    bret = src_cap.open(device);

    if (!bret) {
        IJ.error("Camera initialization is failed.");
        diag_free.dispose();
        return;
    }

    src_cap.set(CV_CAP_PROP_FRAME_WIDTH, width);
    src_cap.set(CV_CAP_PROP_FRAME_HEIGHT, height);

    // Setting the image display window
    width = (int) src_cap.get(CV_CAP_PROP_FRAME_WIDTH);
    height = (int) src_cap.get(CV_CAP_PROP_FRAME_HEIGHT);

    ImagePlus impDsp = IJ.createImage(title, width, height, 1, 24);
    int[] impdsp_intarray = (int[]) impDsp.getChannelProcessor().getPixels();
    impDsp.show();
    impDsp.setRoi(0, 0, impDsp.getWidth(), impDsp.getHeight());

    // show stop dialog
    diag_free.setVisible(true);

    // run
    for (;;) {
        if (flag_fin_loop) {
            break;
        }

        // grab
        impDsp.startTiming();
        bret = src_cap.read(src_mat);
        IJ.showTime(impDsp, impDsp.getStartTime(), title + " : ");

        if (!bret) {
            IJ.error("Error occurred in grabbing.");
            diag_free.dispose();
            break;
        }

        if (src_mat.empty()) {
            IJ.error("Mat is empty.");
            diag_free.dispose();
            break;
        }

        // display
        if (src_mat.type() == CvType.CV_8UC3) {
            OCV__LoadLibrary.mat2intarray(src_mat, impdsp_intarray, width, height);
        } else {
            IJ.error("Color camera is supported only.");
            diag_free.dispose();
            break;
        }

        impDsp.draw();

        // wait
        wait(wait_time);
    }

    diag_free.dispose();

    if (src_cap.isOpened()) {
        src_cap.release();
    }
}
From source file:Face_Reco.java
public static void main(String args[]) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    VideoCapture camera = new VideoCapture(0);

    if (!camera.isOpened()) {
        System.out.println("Error");
    } else {
        Mat frame = new Mat();

        while (true) {
            if (camera.read(frame)) {
                System.out.println("Frame Obtained");
                System.out.println("Captured Frame Width " + frame.width() + " Height " + frame.height());
                // Write the frame to disk, then read it back in color and grayscale
                // (the imread results are discarded here).
                Imgcodecs.imwrite("camera.jpg", frame);
                Imgcodecs.imread("camera.jpg");
                Imgcodecs.imread("camera.jpg", Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
                System.out.println("Done!");
                break;
            }
        }
    }

    camera.release();
}
From source file:com.armeniopinto.stress.control.vision.VisionConfig.java
@Bean(name = "visionDevice", destroyMethod = "release")
public VideoCapture videoCapture() throws VisionException {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    LOGGER.debug("OpenCV native library loaded.");

    final VideoCapture device = new VideoCapture(deviceId) {
        @Override
        public void release() {
            super.release();
            LOGGER.debug("Vision device stopped.");
        }
    };
    device.set(CAP_PROP_FRAME_WIDTH, width);
    device.set(CAP_PROP_FRAME_HEIGHT, height);

    try {
        TimeUnit.MICROSECONDS.sleep(2000L);
    } catch (final InterruptedException ie) {
        LOGGER.warn("Failed to sleep!", ie);
    }

    if (!device.isOpened()) {
        throw new VisionException("Unable to open the camera.");
    }

    final int actualWidth = (int) device.get(CAP_PROP_FRAME_WIDTH);
    final int actualHeight = (int) device.get(CAP_PROP_FRAME_HEIGHT);

    if (actualWidth != width) {
        LOGGER.warn(String.format("Requested frame width %d but got %d instead.", width, actualWidth));
    }
    if (actualHeight != height) {
        LOGGER.warn(String.format("Requested frame height %d but got %d instead.", height, actualHeight));
    }

    LOGGER.debug(String.format("Vision device %d started at %dx%d.", deviceId, actualWidth, actualHeight));
    return device;
}
From source file:com.randhirkumar.webcam.MainFrameForm.java
public void displayScreen() {
    Mat webcamImage = new Mat();
    VideoCapture videoCapture = new VideoCapture(0);

    if (videoCapture.isOpened()) {
        while (true) {
            videoCapture.read(webcamImage);

            if (!webcamImage.empty()) {
                setSize(webcamImage.width() + 50, webcamImage.height() + 70);
                webcamImage = processor.detect(webcamImage);
                cameraPanel.convertMatToImage(webcamImage);
                cameraPanel.repaint();
            } else {
                System.out.println("Problem");
                break;
            }
        }
    }
}
From source file:edu.wpi.first.wpilibj.vision.GripRunner.java
License:Open Source License
/**
 * Make a connection to a camera.
 *
 * @param device Camera number.
 * @param width Window width in pixels.
 * @param height Window height in pixels.
 * @param exposure Relative exposure.
 * @return The configured camera.
 */
public static VideoCapture makeCamera(int device, int width, int height, double exposure) {
    // NOTE: the camera index is hard-coded to 0; the device parameter is not used.
    VideoCapture camera = new VideoCapture(0);
    camera.set(Videoio.CAP_PROP_FRAME_WIDTH, width);
    camera.set(Videoio.CAP_PROP_FRAME_HEIGHT, height);

    if (exposure > -1.0) {
        System.out.println("\t" + exposure);
        camera.set(Videoio.CAP_PROP_AUTO_EXPOSURE, 0);
        camera.set(Videoio.CAP_PROP_EXPOSURE, exposure);
    }

    if (!camera.isOpened()) {
        throw new RuntimeException("Camera will not open");
    }

    return camera;
}
From source file:FaceRecog.App.java
private void runMainLoop(String[] args) {
    ImageProcessor imageProcessor = new ImageProcessor();
    Mat webcamMatImage = new Mat();
    Image tempImage;
    VideoCapture capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 320);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 240);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcamMatImage);

            if (!webcamMatImage.empty()) {
                tempImage = imageProcessor.toBufferedImage(webcamMatImage);
                ImageIcon imageIcon = new ImageIcon(tempImage, "Captured video");
                imageLabel.setIcon(imageIcon);
                frame.pack(); // this will resize the window to fit the image
            } else {
                System.out.println(" -- Frame not captured -- Break!");
                break;
            }
        }
    } else {
        System.out.println("Couldn't open capture.");
    }
}
From source file:formularios.FrmCamera.java
private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
    System.out.println("Hello, OpenCV");

    // Load the native library.
    System.out.println(System.getProperty("java.library.path"));
    System.loadLibrary("opencv-300");

    VideoCapture camera = new VideoCapture(0);
    camera.open(0); // redundant: the constructor above already opens device 0

    if (!camera.isOpened()) {
        System.out.println("Camera Error");
    } else {
        System.out.println("Camera OK?");
    }

    Mat frame = new Mat();
    //camera.grab();
    //System.out.println("Frame Grabbed");
    //camera.retrieve(frame);
    //System.out.println("Frame Decoded");
    camera.read(frame);
    System.out.println("Frame Obtained");

    /* No difference
    camera.release();
    */

    System.out.println("Captured Frame Width " + frame.width());
    Imgcodecs.imwrite("camera.jpg", frame);
    System.out.println("OK");
}
From source file:io.github.jakejmattson.facialrecognition.FacialRecognition.java
License:Open Source License
private static void capture() {
    File classifier = new File("lbpcascade_frontalface_improved.xml");

    if (!classifier.exists()) {
        displayFatalError("Unable to find classifier!");
        return;
    }

    CascadeClassifier faceDetector = new CascadeClassifier(classifier.toString());
    VideoCapture camera = new VideoCapture(0);

    if (!camera.isOpened()) {
        displayFatalError("No camera detected!");
        return;
    }

    if (!DATABASE.exists())
        DATABASE.mkdir();

    ImageFrame frame = new ImageFrame();

    while (frame.isOpen() && camera.isOpened()) {
        Mat rawImage = new Mat();
        camera.read(rawImage);
        Mat newImage = detectFaces(rawImage, faceDetector, frame);
        frame.showImage(newImage);
    }

    camera.release();
}
From source file:org.usfirst.frc.team5066.controller2017.GripRunner.java
License:Open Source License
/**
 * Make a connection to a camera.
 *
 * @param device Camera number.
 * @param width Window width in pixels.
 * @param height Window height in pixels.
 * @param exposure Relative exposure.
 * @return The configured camera.
 */
public static VideoCapture makeCamera(int device, int width, int height, double exposure) {
    // NOTE: the camera index is hard-coded to 1; the device parameter is not used.
    VideoCapture camera = new VideoCapture(1);
    camera.set(Videoio.CAP_PROP_FRAME_WIDTH, width);
    camera.set(Videoio.CAP_PROP_FRAME_HEIGHT, height);

    if (exposure > -1.0) {
        System.out.println("\t" + exposure);
        camera.set(Videoio.CAP_PROP_AUTO_EXPOSURE, 0);
        camera.set(Videoio.CAP_PROP_EXPOSURE, exposure);
    }

    if (!camera.isOpened()) {
        throw new RuntimeException("Camera will not open");
    }

    return camera;
}
From source file:readnumber.ReadNumber.java
/**
 * Main video-capture loop: reads webcam frames and displays them on the form.
 *
 * @param args command-line arguments (not used)
 */
private void runMainLoop(String[] args) {
    imageProcessor = new ImageProcessor();
    webcamMatImage = new Mat();
    Image tempImage;
    VideoCapture capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 320);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 240);

    // Create a face detector from the cascade file
    faceDetector = new CascadeClassifier(CascadeFile);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcamMatImage);

            if (!webcamMatImage.empty()) {
                // Output video to the form (JLabel)
                //imageLabel.setBounds(0, 61, 320, 240);
                tempImage = imageProcessor.toBufferedImage(webcamMatImage);
                ImageIcon imageIcon = new ImageIcon(tempImage, "Captured video");
                imageLabel.setIcon(imageIcon);

                Buttonl.setBounds(10, 0, 140, 30);
                //Button2.setBounds(140, 0, 120, 30);
                message.setBounds(10, 30, 200, 30);
                inform.setBounds(10, 315, 200, 30);
                caunttext.setBounds(100, 315, 200, 30);
                textVideo.setBounds(10, 295, 120, 30);
                //frame.pack(); //this will resize the window to fit the image
            } else {
                System.out.println(" -- Frame not captured -- Break!");
                break;
            }
        }
    } else {
        System.out.println("Couldn't open capture.");
    }
}