List of usage examples for org.opencv.core.Mat.empty()
public boolean empty()
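Mat.empty() returns true when the matrix holds no data, so it is the usual guard after a VideoCapture.read() or imread() call that may have failed. The sketch below is a minimal, self-contained illustration of that pattern and is not taken from the examples that follow; it assumes an OpenCV 3.x+ Java build where VideoCapture lives in org.opencv.videoio, and camera index 0 is a placeholder for whatever device is available.

    import org.opencv.core.Core;
    import org.opencv.core.Mat;
    import org.opencv.videoio.VideoCapture;

    public class MatEmptyCheck {
        public static void main(String[] args) {
            System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

            Mat frame = new Mat();                       // a fresh Mat is empty: frame.empty() == true
            VideoCapture capture = new VideoCapture(0);  // 0 = default camera (placeholder)

            if (capture.isOpened() && capture.read(frame) && !frame.empty()) {
                System.out.println("Captured a " + frame.width() + "x" + frame.height() + " frame");
            } else {
                System.out.println("No frame captured");
            }
            capture.release();
        }
    }

Each of the examples below applies the same guard inside a larger pipeline, skipping, logging, or breaking out of a capture loop when empty() reports that no image data arrived.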
From source file:com.randhirkumar.webcam.MainFrameForm.java
public void displayScreen() {
    Mat webcamImage = new Mat();
    VideoCapture videoCapture = new VideoCapture(0);
    if (videoCapture.isOpened()) {
        while (true) {
            videoCapture.read(webcamImage);
            // only process the frame if the grab actually returned data
            if (!webcamImage.empty()) {
                setSize(webcamImage.width() + 50, webcamImage.height() + 70);
                webcamImage = processor.detect(webcamImage);
                cameraPanel.convertMatToImage(webcamImage);
                cameraPanel.repaint();
            } else {
                System.out.println("Problem");
                break;
            }
        }
    }
}
From source file:com.sikulix.api.Image.java
License:Open Source License
private Mat get(URL url) {
    Mat mContent = new Mat();
    if (SX.isSet(url)) {
        urlImg = url;
        if (isCaching()) {
            mContent = imageFiles.get(urlImg);
            if (SX.isNull(mContent)) {
                mContent = new Mat();
            }
        }
        if (mContent.empty()) {
            mContent = get();
        }
        if (isCaching() && !mContent.empty()) {
            changeCache(true, urlImg, content);
        }
    }
    return mContent;
}
From source file:com.sikulix.api.Image.java
License:Open Source License
private Mat get() {
    Mat mContent = new Mat();
    if (urlImg != null) {
        File imgFile = new File(urlImg.getPath());
        mContent = Highgui.imread(imgFile.getAbsolutePath());
        if (!mContent.empty()) {
            log.debug("get: loaded: (%dx%s) %s", mContent.width(), mContent.height(), urlImg);
        } else {
            log.error("get: not loaded: %s", urlImg);
        }
    }
    return mContent;
}
From source file:com.ttolley.pongbot.opencv.CvWorker.java
@Override
protected Void doInBackground() throws Exception {
    try {
        //-- 2. Read the video stream
        Mat webcam_image = new Mat();
        if (capture.isOpened()) {
            while (true) {
                capture.read(webcam_image);
                if (!webcam_image.empty()) {
                    PublishObject publishObject = new PublishObject(webcam_image);
                    for (Map.Entry<FilterType, Filter> entry : filters.entrySet()) {
                        Mat hsv_image = new Mat();
                        Mat thresholded = new Mat();
                        Filter filter = entry.getValue();
                        // One way to select a range of colors by Hue
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        Core.inRange(hsv_image, filter.hsv_min, filter.hsv_max, thresholded);
                        // Morph open
                        final Size erodeSizeObj = new Size(filter.erodeSize, filter.erodeSize);
                        final Size dilateSizeObj = new Size(filter.dilateSize, filter.dilateSize);
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        // Morph close
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));
                        Mat temp = new Mat();
                        thresholded.copyTo(temp);
                        List<MatOfPoint> contours = new ArrayList<>();
                        Mat heirarchy = new Mat();
                        Imgproc.findContours(temp, contours, heirarchy, Imgproc.RETR_TREE,
                                Imgproc.CHAIN_APPROX_SIMPLE);
                        FilteredObject.Target largestTarget = findTarget(contours, webcam_image, filter);
                        publishObject.addObject(entry.getKey(), new FilteredObject(largestTarget, thresholded));
                    }
                    publish(publishObject);
                } else {
                    System.out.println(" --(!) No captured frame -- Break!");
                    break;
                }
            }
        }
    } catch (Exception ex) {
        System.out.println("Unable to loop");
        System.out.println(getStackTrace(ex));
    }
    return null;
}
From source file:Controladores.CtrlInterfazPrincipal.java
public void iniciarCaptura() {
    capturador = new Capturador(0);
    if (capturador.conectarCamara()) {
        reconocedorCara = new ReconocedorCara();
        ArrayList<Cara> vectorCaras;
        Mat imagenMat;
        while (true) {
            imagenMat = capturador.getImagen();
            if (imagenMat != null && !imagenMat.empty()) {
                vectorCaras = reconocedorCara.detectarCaras(imagenMat);
                if (vectorCaras.size() >= 1) {
                    this.interfazPrincipal.setLblImagenEncontrada(convertir(vectorCaras.get(0).getImagen()));
                    this.clasificador.getLegajo(vectorCaras);
                    this.interfazPrincipal.setLblLegajo(vectorCaras.get(0).getLegajo());
                    // putText(img, "OpenCV 2", Point(180,320), FONT_HERSHEY_SCRIPT_COMPLEX, 3, CV_RGB(125,12,145), 2);
                }
                this.interfazPrincipal.setLblImagenCamara(convertir(imagenMat));
            }
        }
    } else {
        // this.interfazPrincipal.setLabelValidacion("No se pudo establecer conexion con la camara");
    }
}
From source file:controller.SearchController.java
@Override
public void processRequest(HttpServletRequest request, HttpServletResponse response, UriUtils uri) {
    if (searchAlg == null) {
        Controller.setQuickResponseMessage(500, "error", "Servlet not trained!", response);
        FileSlaveServlet.writeToLog(
                "<ERROR> SearchController.processRequest() : searchAlg == null | FileSlave not trained!");
        return;
    }
    // TODO: read image from request and give it to search algorithm
    BufferedImage temp;
    try {
        temp = ImageIO.read(request.getInputStream());
        if (temp == null) {
            Controller.setQuickResponseMessage(400, "error",
                    "Please send a request with a valid image/xxx Content-Type", response);
            return;
        }
        // ImageIO.write(temp, "jpg", new File("C:\\Users\\Vlad\\Desktop\\testoutput.jpg"));
    } catch (IOException e) {
        Controller.setQuickResponseMessage(400, "error",
                "Please send a request with a valid image/xxx Content-Type", response);
        e.printStackTrace();
        return;
    }
    Mat inputImage = ImageUtils.Convert_BufferedImage2Mat_BGR(temp);
    if (inputImage == null || inputImage.empty()) {
        Controller.setQuickResponseMessage(400, "error", "Please send a image in the request body!", response);
        return;
    }
    // Imgcodecs.imwrite("C:\\Users\\Vlad\\Desktop\\testoutput.jpg", inputImage);
    List<ImageModel> results = null;
    Classifier classifier = searchAlg.getClassifier();
    Instance inputInstance = classifier.getInstanceObject(searchAlg.extractFeatures(inputImage));
    SearchSystemDAO searchDAO = ((SearchSystemDAO) FileSlaveServlet.daoFactory
            .getDAOInstance(DAOFactory.SEARCH_SYSTEM_DAO));
    try {
        // results = searchDAO.searchFiles(inputImage, 10, searchAlg);
        // long time = System.currentTimeMillis();
        // results = searchDAO.getImagesFromCluster(classifier.classifie(inputInstance));
        results = searchDAO.getImagesFromClusters(classifier.getFirstNClasses(inputInstance, 2));
        searchAlg.sortData(results, inputInstance);
        // System.out.printf("[client] Time for training [%d]\r\n", (System.currentTimeMillis() - time));
    } catch (ClassNotFoundException ex) {
        Controller.setQuickResponseMessage(500, "error", "Server error, working to fix it!", response);
        FileSlaveServlet.writeToLog(
                "<ERROR> SearchController.processRequest() : ClassNotFoundException(" + ex.getMessage() + ")");
        ex.printStackTrace();
        return;
    } catch (SQLException ex) {
        Controller.setQuickResponseMessage(500, "error", "Server error, working to fix it!", response);
        FileSlaveServlet.writeToLog(
                "<ERROR> SearchController.processRequest() : SQLException(" + ex.getMessage() + ")");
        ex.printStackTrace();
        return;
    }
    JSONListAdapter<ImageModel> resultsAdapter = new JSONListAdapter();
    resultsAdapter.setData(results);
    JsonUtils responseBody = new JsonUtils();
    responseBody.setStatus("success");
    responseBody.setJSONAdapter(resultsAdapter);
    try {
        responseBody.writeToOutput(response.getWriter());
    } catch (IOException ex) {
        FileSlaveServlet.writeToLog(
                "<ERROR> SearchController.processRequest() : IOException(" + ex.getMessage() + ")");
        response.setStatus(500);
    }
    response.setStatus(200);
}
From source file:cv.recon.controller.OutputDisplayController.java
License:Open Source License
/**
 * Update output view after image processing.
 * @param src Original Mat
 */
public void updateView(Mat src) {
    if (!src.empty()) {
        processImage(src);
    }
    if (!output.empty()) {
        if (writableImage == null) {
            writableImage = MatFXUtils.toFXImage(output, null);
        } else {
            MatFXUtils.toFXImage(output, writableImage);
        }
        outputView.setImage(writableImage);
    }
}
From source file:edu.ucue.tfc.Modelo.VideoProcessor.java
public Image convertCvMatToImage(Mat frameToConvert) throws IOException {
    // release the previous encode buffer before reusing it for a new frame
    if (!buffer.empty()) {
        buffer.release();
    }
    try {
        Imgproc.resize(frameToConvert, frameToConvert, frameSize);
        Imgcodecs.imencode(".png", frameToConvert, buffer, params);
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }
    image = ImageIO.read(new ByteArrayInputStream(buffer.toArray()));
    if (!frameToConvert.empty()) {
        frameToConvert.release();
    }
    return image;
}
From source file:edu.ucue.tfc.Modelo.VideoProcessor.java
public void processVideo() {
    do {
        Mat tmp = new Mat();
        video.read(tmp);
        if (!tmp.empty()) {
            frame = tmp.clone();
            tmp.release();
            if (frameCounter < (getFrameCount() / 2) - 1) {
                frameCounter++;
                if (getMinutes() > 0) {
                    carsPerMinute = getDetectedCarsCount() / getMinutes();
                }
                processFrame(getFrame());
            } else {
                frameCounter = 0;
                finished = true;
                System.out.println("Reiniciando..");
                setFramePos(1);
            }
        } else {
            // empty Mat: no frame could be read, so rewind to the first frame
            System.out.println("Imagen Vaca");
            frameCounter = 0;
            finished = true;
            System.out.println("Reiniciando..");
            setFramePos(1);
        }
    } while (frameCounter > (getFrameCount() / 2) - 2);
}
From source file:FaceRecog.App.java
private void runMainLoop(String[] args) {
    ImageProcessor imageProcessor = new ImageProcessor();
    Mat webcamMatImage = new Mat();
    Image tempImage;
    VideoCapture capture = new VideoCapture(0);
    capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 320);
    capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 240);
    if (capture.isOpened()) {
        while (true) {
            capture.read(webcamMatImage);
            if (!webcamMatImage.empty()) {
                tempImage = imageProcessor.toBufferedImage(webcamMatImage);
                ImageIcon imageIcon = new ImageIcon(tempImage, "Captured video");
                imageLabel.setIcon(imageIcon);
                frame.pack(); // this will resize the window to fit the image
            } else {
                System.out.println(" -- Frame not captured -- Break!");
                break;
            }
        }
    } else {
        System.out.println("Couldn't open capture.");
    }
}