List of usage examples for the org.opencv.core.Mat method width()
public int width()
From source file:org.usfirst.frc.team2084.CMonster2016.vision.ImageConvertor.java
License:Open Source License
public BufferedImage toBufferedImage(Mat image) { int width = image.width(); int height = image.height(); int type = image.type(); // Get BufferedImage type int javaType = toJavaImageType(type); // If the Mat does not match the BufferedImage, create a new one. if (javaImage == null || width != javaImage.getWidth() || height != javaImage.getHeight() || javaType != javaImage.getType()) { javaImage = new BufferedImage(width, height, javaType); }//ww w .j a va 2s.co m // Copy Mat data to BufferedImage image.get(0, 0, ((DataBufferByte) javaImage.getRaster().getDataBuffer()).getData()); return javaImage; }
From source file:org.usfirst.frc2084.CMonster2016.vision.ImageFrame.java
License:Open Source License
/** * Shows the specified image in the frame. The frame resizes to fit the * image.//from w w w .j a v a2 s .c o m * * @param image the image to show */ public void showImage(Mat image) { // Get the properties of the Mat int width = image.width(); int height = image.height(); synchronized (this) { // Copy Mat data to BufferedImage javaImage = convertor.toBufferedImage(image); } setSize(width, height); repaint(); }
From source file:Recognizer.Recognizer.java
public Image TemplateMatching(Image imQuery, Image imDB, int match_method) { System.out.println("Running Template Matching ..."); //Mat img = Highgui.imread(inFile); // Image in which area has to be searched //Mat template_img = Highgui.imread(templateFile); // Search Image Mat matQuery = imQuery.Image3CtoMat_CV(); Mat matDB = imDB.Image3CtoMat_CV(); Mat hsvQ = new Mat(), hsvDB = new Mat(); Imgproc.cvtColor(matQuery, hsvQ, COLOR_RGB2HSV); Imgproc.cvtColor(matDB, hsvDB, COLOR_RGB2HSV); // Create result image matrix int resultImg_cols = matDB.cols() - matQuery.cols() + 1; int resultImg_rows = matDB.rows() - matQuery.rows() + 1; Mat matRes = new Mat(resultImg_rows, resultImg_cols, CvType.CV_32FC1); // Template Matching with Normalization Imgproc.matchTemplate(hsvDB, hsvQ, matRes, match_method); Core.normalize(matRes, matRes, 0, 1, Core.NORM_MINMAX, -1, new Mat()); // / Localizing the best match with minMaxLoc Core.MinMaxLocResult Location_Result = Core.minMaxLoc(matRes); Point matchLocation;/*from ww w . j av a 2 s . c o m*/ if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) { matchLocation = Location_Result.minLoc; } else { matchLocation = Location_Result.maxLoc; } // Display Area by Rectangle Core.rectangle(matDB, matchLocation, new Point(matchLocation.x + matQuery.cols(), matchLocation.y + matQuery.rows()), new Scalar(0, 255, 0)); Image imOut = new Image(matDB.width(), matDB.height()); //Image imOut = new Image(matQuery.cols(), matQuery.rows()); //Mat m = new Mat(matDB); //m =//matDB.submat((int)matchLocation.y, (int)matchLocation.y + matQuery.rows(),(int)matchLocation.x, (int)matchLocation.x + matQuery.cols()); imOut.Mat_CVtoImage3C(matDB); System.out.println("Location: " + Location_Result.minLoc.x + " " + Location_Result.minLoc.y + " " + Location_Result.maxLoc.x + " " + Location_Result.maxLoc.y); return imOut; }
From source file:servershootingstar.BallDetector.java
public static void initSwing(Mat mat) { guiFrame = new JFrame("window"); guiFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); // image size is 640x480 guiFrame.setSize(mat.width() + 20, mat.height() + 40); guiFrame.setLayout(new FlowLayout()); guiLabel = new JLabel(); guiFrame.add(guiLabel);/*from w ww. j ava 2 s .c o m*/ guiFrame.setVisible(true); updateSwing(mat); }
From source file:spring2017ip.ConvolutionDemo.java
public Mat combineGxGy(Mat gx, Mat gy) { Mat outputImage = new Mat(gx.rows(), gx.cols(), gx.type()); for (int r = 0; r < gx.height(); r++) for (int c = 0; c < gx.width(); c++) { double x[] = gx.get(r, c); double y[] = gy.get(r, c); double m = Math.sqrt(x[0] * x[0] + y[0] * y[0]); outputImage.put(r, c, m);//from w w w . ja v a 2 s .c om } return outputImage; }
From source file:spycam.Spycam.java
public static void main(String args[]) { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); VideoCapture camera = new VideoCapture(0); if (!camera.isOpened()) { System.out.println("Error"); } else {//ww w. j a v a2 s .c o m Mat frame = new Mat(); while (true) { if (camera.read(frame)) { System.out.println("Frame Obtained"); System.out.println("Captured Frame Width " + frame.width() + " Height " + frame.height()); Imgcodecs.imwrite("camera.jpg", frame); System.out.println("OK"); break; } } } camera.release(); }
From source file:src.main.java.org.roomwatcher.watcher.Watcher.java
/** * Converts/writes a Mat into a BufferedImage. * // w w w. j a v a 2 s . c o m * @param matrix Mat of type CV_8UC3 or CV_8UC1 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY */ public boolean MatToBufferedImage(Mat matBGR) { long startTime = System.nanoTime(); int width = matBGR.width(), height = matBGR.height(), channels = matBGR.channels(); byte[] sourcePixels = new byte[width * height * channels]; matBGR.get(0, 0, sourcePixels); // create new image and get reference to backing data image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR); final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length); long endTime = System.nanoTime(); //System.out.println(String.format("Elapsed time: %.2f ms", (float)(endTime - startTime)/1000000)); return true; }
From source file:src.main.java.org.roomwatcher.watcher.Window.java
public static void main(String arg[]) throws InterruptedException { // Load the native library. System.loadLibrary("opencv_java2410"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setSize(400, 400);/*from w ww.j a va 2 s .com*/ frame.setLocationRelativeTo(null); Processor processor = new Processor(); Watcher watcher = new Watcher(); watcher.add(peopleNumberLabel); frame.setContentPane(watcher); frame.setVisible(true); //-- 2. Read the video stream Mat webcam_image = new Mat(); VideoCapture capture = new VideoCapture(0); if (capture.isOpened()) { while (true) { capture.read(webcam_image); if (!webcam_image.empty()) { frame.setSize(webcam_image.width() + 40, webcam_image.height() + 160); //-- 3. Apply the classifier to the captured image webcam_image = processor.detect(webcam_image); //-- 4. Display the image watcher.MatToBufferedImage(webcam_image); // We could look at the error... watcher.repaint(); } } } }
From source file:syncleus.dann.data.video.TLDView.java
License:Apache License
/**
 * Per-frame camera callback: downscales the frame, runs (or lazily
 * initializes) the TLD tracker on the scaled-down grayscale copy, and
 * draws the tracked points/box back onto the full-size frame.
 *
 * @param originalFrame the full-resolution camera frame (drawn on in place)
 * @return the same frame, annotated with tracking overlays and any error text
 */
@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone,
        // so scale everything down to WORKING_FRAME_SIZE.
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        // Ratio between the full frame and the working frame, used to map
        // coordinates between the two resolutions.
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // Useful to see what we're actually working with: paste the working
        // frame into the bottom-left corner of the original.
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));
        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only: initialize the tracker
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                System.out.println("Working Ration: " + workingRatio + " / Tracking Box: "
                        + _trackedBox + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again !
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                // Normal tracking step: compare last and current gray frames.
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);
                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                // Previous points in red, current points in green (scaled back
                // up to the full-resolution frame), tracked box in blue.
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio,
                        new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));
                // Current frame becomes the reference for the next callback.
                _currentGray.copyTo(_lastGray);
                // overlay the current positive examples on the real image
                // (needs converting at the same time !)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        // Record the failure so it is rendered on subsequent frames.
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(TLDUtil.TAG, "TLDView PROBLEM", e);
    }
    if (_errMessage != null) {
        Core.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }
    return originalFrame;
}
From source file:topcodes.Scanner.java
License:Open Source License
/** * Scan the image and return a list of all topcodes found in it *///from ww w .java 2s . c om public List<TopCode> scan(Mat mat) throws IOException { int width = mat.width(), height = mat.height(), channels = mat.channels(); byte[] sourcePixels = new byte[width * height * channels]; mat.get(0, 0, sourcePixels); // create new BufferedImage and image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR); // Get reference to backing data final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length); return scan(image); }