List of usage examples for org.opencv.core.Mat.height()
public int height()
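Before the project examples, here is a minimal standalone sketch of what height() returns. The class name HeightDemo and the 480x640 dimensions are placeholders, not taken from any project below; it assumes the OpenCV Java bindings and native library are available on the library path.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class HeightDemo { // hypothetical example class, not from the projects listed below
    public static void main(String[] args) {
        // Load the OpenCV native library before creating any Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A Mat constructed with 480 rows and 640 columns:
        // height() is a synonym for rows(), width() for cols().
        Mat mat = new Mat(480, 640, CvType.CV_8UC1);
        System.out.println("height = " + mat.height() + ", rows = " + mat.rows()); // 480, 480
        System.out.println("width  = " + mat.width() + ", cols = " + mat.cols()); // 640, 640
    }
}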
From source file:spring2017ip.ConvolutionDemo.java
public Mat combineGxGy(Mat gx, Mat gy) {
    Mat outputImage = new Mat(gx.rows(), gx.cols(), gx.type());
    for (int r = 0; r < gx.height(); r++) {
        for (int c = 0; c < gx.width(); c++) {
            double[] x = gx.get(r, c);
            double[] y = gy.get(r, c);
            double m = Math.sqrt(x[0] * x[0] + y[0] * y[0]);
            outputImage.put(r, c, m);
        }
    }
    return outputImage;
}
From source file:spycam.Spycam.java
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    VideoCapture camera = new VideoCapture(0);
    if (!camera.isOpened()) {
        System.out.println("Error");
    } else {
        Mat frame = new Mat();
        while (true) {
            if (camera.read(frame)) {
                System.out.println("Frame Obtained");
                System.out.println("Captured Frame Width " + frame.width() + " Height " + frame.height());
                Imgcodecs.imwrite("camera.jpg", frame);
                System.out.println("OK");
                break;
            }
        }
    }
    camera.release();
}
From source file:src.main.java.org.roomwatcher.watcher.Watcher.java
/**
 * Converts/writes a Mat into the BufferedImage backing this component.
 *
 * @param matBGR Mat of type CV_8UC3 (BGR byte data)
 * @return true once the pixels have been copied into a TYPE_3BYTE_BGR BufferedImage
 */
public boolean MatToBufferedImage(Mat matBGR) {
    long startTime = System.nanoTime();
    int width = matBGR.width(), height = matBGR.height(), channels = matBGR.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    matBGR.get(0, 0, sourcePixels);

    // create new image and get reference to backing data
    image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    long endTime = System.nanoTime();
    //System.out.println(String.format("Elapsed time: %.2f ms", (float) (endTime - startTime) / 1000000));
    return true;
}
From source file:src.main.java.org.roomwatcher.watcher.Window.java
public static void main(String[] arg) throws InterruptedException {
    // Load the native library.
    System.loadLibrary("opencv_java2410");

    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    frame.setLocationRelativeTo(null);

    Processor processor = new Processor();
    Watcher watcher = new Watcher();
    watcher.add(peopleNumberLabel);
    frame.setContentPane(watcher);
    frame.setVisible(true);

    //-- 2. Read the video stream
    Mat webcam_image = new Mat();
    VideoCapture capture = new VideoCapture(0);
    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 160);
                //-- 3. Apply the classifier to the captured image
                webcam_image = processor.detect(webcam_image);
                //-- 4. Display the image
                watcher.MatToBufferedImage(webcam_image); // We could look at the error...
                watcher.repaint();
            }
        }
    }
}
From source file:syncleus.dann.data.video.TLDView.java
License:Apache License
@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);

        // useful to see what we're actually working with...
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                System.out.println("Working Ratio: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again!
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);
                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio, new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio), new Scalar(0, 0, 255));
                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image (needs converting at the same time!)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(TLDUtil.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Core.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}
From source file:topcodes.Scanner.java
License:Open Source License
/**
 * Scan the image and return a list of all TopCodes found in it.
 */
public List<TopCode> scan(Mat mat) throws IOException {
    int width = mat.width(), height = mat.height(), channels = mat.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    mat.get(0, 0, sourcePixels);

    // create new BufferedImage and get reference to backing data
    image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    return scan(image);
}
From source file:uk.ac.horizon.artcodes.detect.handler.MarkerActionDetectionHandler.java
License:Open Source License
@Override
public void onMarkersDetected(Collection<Marker> markers, ArrayList<MatOfPoint> contours, Mat hierarchy,
        Size sourceImageSize) {
    countMarkers(markers);
    long now = System.currentTimeMillis();

    int best = 0;
    Action selected = null;
    for (Action action : this.experience.getActions()) {
        if (action.getMatch() == Action.Match.any) {
            for (String code : action.getCodes()) {
                int count = markerCounts.count(code);
                if (count > best) {
                    selected = action;
                    best = count;
                }
            }
        } else if (action.getMatch() == Action.Match.all) {
            int min = MAX;
            int total = 0;
            for (String code : action.getCodes()) {
                int count = markerCounts.count(code);
                min = Math.min(min, count);
                total += (count * 2);
            }
            if (min > REQUIRED && total > best) {
                best = total;
                selected = action;
            }
        }
    }

    if (best < REQUIRED) {
        if (currentAction != null) {
            if (now - lastSeen > REMAIN) {
                currentAction = null;
                this.markerActionHandler.onMarkerActionDetected(null, null, null);
            }
        }
    } else if (selected != currentAction) {
        currentAction = selected;
        lastSeen = now;
        ArrayList<MarkerImage> markerImages = null;
        if (this.markerDrawer != null) {
            Marker markerObject = null;
            for (Marker possibleMarkerObject : markers) {
                if (possibleMarkerObject.toString().equals(currentAction.getCodes().get(0))) {
                    markerObject = possibleMarkerObject;
                }
            }
            if (markerObject != null) {
                final Rect boundingRect = Imgproc.boundingRect(contours.get(markerObject.markerIndex));
                Mat thumbnailMat = this.markerDrawer.drawMarker(markerObject, contours, hierarchy, boundingRect, null);
                Bitmap thumbnail = Bitmap.createBitmap(thumbnailMat.width(), thumbnailMat.height(),
                        Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(thumbnailMat, thumbnail);
                MarkerImage markerImage = new MarkerImage(markerObject.toString(), thumbnail,
                        (float) (boundingRect.tl().x / sourceImageSize.width),
                        (float) (boundingRect.tl().y / sourceImageSize.height),
                        (float) (boundingRect.width / sourceImageSize.width),
                        (float) (boundingRect.height / sourceImageSize.height));
                markerImages = new ArrayList<>(1);
                markerImages.add(markerImage);
                Log.i("SOURCEIMG", "w" + sourceImageSize.width + " h" + sourceImageSize.height);
            }
        }
        this.markerActionHandler.onMarkerActionDetected(currentAction, currentAction, markerImages);
    } else {
        for (Marker possibleMarkerObject : markers) {
            String marker = possibleMarkerObject.toString();
            for (String code : currentAction.getCodes()) {
                if (code.equals(marker)) {
                    lastSeen = now;
                    return;
                }
            }
        }
    }
}
From source file:uk.ac.horizon.artcodes.detect.handler.MultipleMarkerActionDetectionHandler.java
License:Open Source License
private MarkerImage createImageForMarker(Marker marker, ArrayList<MatOfPoint> contours, Mat hierarchy,
        Size sourceImageSize) {
    if (marker != null) {
        final Rect boundingRect = Imgproc.boundingRect(contours.get(marker.markerIndex));
        final Mat thumbnailMat = this.markerDrawer.drawMarker(marker, contours, hierarchy, boundingRect, null);
        final Bitmap thumbnail = Bitmap.createBitmap(thumbnailMat.width(), thumbnailMat.height(),
                Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(thumbnailMat, thumbnail);
        return new MarkerImage(marker.toString(), thumbnail,
                (float) (boundingRect.tl().x / sourceImageSize.width),
                (float) (boundingRect.tl().y / sourceImageSize.height),
                (float) (boundingRect.width / sourceImageSize.width),
                (float) (boundingRect.height / sourceImageSize.height));
    }
    return null;
}
From source file:video.PictureAnalyser.java
public void Calibrate(BufferedImage img) {
    Mat frameMat = PictureView.bufferedImageToMat(img);
    pic = blur(frameMat, 3);

    Mat imgHSV = new Mat();
    Imgproc.cvtColor(pic, imgHSV, Imgproc.COLOR_RGB2HSV);

    int height = 10;
    int width = 10;
    int hue = 0;
    int saturation = 0;
    int value = 0;
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            double[] testColor = imgHSV.get((imgHSV.height() / 2) - (width / 2) + i,
                    (imgHSV.width() / 2) - (height / 2) + j);
            if (testColor[0] > 140)
                testColor[0] = testColor[0] - 180;
            hue = hue + (int) testColor[0];
            saturation = saturation + (int) testColor[1];
            value = value + (int) testColor[2];
        }
    }

    /* Green values: hue = 55; saturation = 240; value = 70; */
    hue = hue / (height * width);
    saturation = saturation / (height * width);
    value = value / (height * width);

    this.color.get(0).val[0] = hue - 5;
    this.color.get(0).val[1] = saturation - 40;
    this.color.get(0).val[2] = value - 40;
    this.color.get(1).val[0] = hue + 5;
    this.color.get(1).val[1] = saturation + 40;
    this.color.get(1).val[2] = value + 40;
}
From source file:video.PictureView.java
public static BufferedImage setCross(BufferedImage img) {
    Mat imgMat = bufferedImageToMat(img);
    Imgproc.circle(imgMat, new Point(imgMat.width() / 2, imgMat.height() / 2), 4,
            new Scalar(255, 49, 255, 255));
    img = mat2Img(imgMat);
    return img;
}