List of usage examples for the org.opencv.core.Mat constructor Mat()
public Mat()
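The no-argument constructor creates an empty matrix header with no allocated data; when the Mat is later passed as an output argument, the receiving OpenCV function sizes and allocates it. A minimal sketch of this behavior (class and variable names here are illustrative, not taken from the sources below; the native library name varies by OpenCV build):

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class EmptyMatExample {
    public static void main(String[] args) {
        System.loadLibrary("opencv_java");   // adjust to the installed OpenCV Java binding

        Mat dst = new Mat();                 // empty header: no rows, cols, or data yet
        System.out.println(dst.empty());     // true

        // When passed as an output argument, OpenCV allocates dst automatically.
        Mat src = Mat.zeros(new Size(4, 4), CvType.CV_8UC3);
        Imgproc.cvtColor(src, dst, Imgproc.COLOR_BGR2GRAY);
        System.out.println(dst.size());      // 4x4, allocated by cvtColor
    }
}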
From source file:com.compta.firstak.notedefrais.MainActivity.java
public void Opencv(String imageName) {
    bitmap = BitmapFactory.decodeFile(imageName);
    Mat imageMat = new Mat();
    org.opencv.android.Utils.bitmapToMat(bitmap, imageMat);
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_BGR2GRAY);
    // 1) Apply gaussian blur to remove noise
    Imgproc.GaussianBlur(imageMat, imageMat, new Size(9, 9), 0);
    // 2) AdaptiveThreshold -> classify as either black or white
    Imgproc.adaptiveThreshold(imageMat, imageMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 5, 2);
    // 3) Invert the image -> so most of the image is black
    Core.bitwise_not(imageMat, imageMat);
    // 4) Dilate -> fill the image using the MORPH_DILATE kernel
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_DILATE, new Size(3, 3), new Point(1, 1));
    Imgproc.dilate(imageMat, imageMat, kernel);
    org.opencv.android.Utils.matToBitmap(imageMat, bitmap);
    mImageViewer.setImageBitmap(bitmap);
    ByteArrayOutputStream stream1 = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream1);
    byteArray = stream1.toByteArray();
}
From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.BackgroundSubtractor.java
License:Open Source License
@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat foreground = new Mat();
    // Apply background subtraction
    mog.apply(frame, foreground);
    return foreground;
}
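The mog field is declared elsewhere in that class and not shown in this excerpt. A minimal sketch of how such a background subtractor could be created and applied with the OpenCV Java API (the class, field, and parameter values here are illustrative assumptions, not taken from the gobees source):

import org.opencv.core.Mat;
import org.opencv.video.BackgroundSubtractorMOG2;
import org.opencv.video.Video;

public class BackgroundSubtractionSketch {
    // history = 50 frames, varThreshold = 16, shadow detection disabled
    private final BackgroundSubtractorMOG2 mog =
            Video.createBackgroundSubtractorMOG2(50, 16, false);

    public Mat subtract(Mat frame) {
        Mat foreground = new Mat();       // allocated by apply()
        mog.apply(frame, foreground);     // moving pixels become white in the mask
        return foreground;
    }
}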
From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java
License:Open Source License
/**
 * Default ContoursFinder constructor.
 * minArea is initialized to 15 and maxArea to 800.
 */
public ContoursFinder() {
    contourList = new ArrayList<>();
    hierarchy = new Mat();
    this.minArea = MIN_AREA;
    this.maxArea = MAX_AREA;
}
From source file:com.davidmiguel.gobees.monitoring.algorithm.processors.ContoursFinder.java
License:Open Source License
/**
 * ContoursFinder constructor.
 *
 * @param minArea the min area to consider a contour a bee.
 * @param maxArea the max area to consider a contour a bee.
 */
public ContoursFinder(double minArea, double maxArea) {
    contourList = new ArrayList<>();
    hierarchy = new Mat();
    this.minArea = minArea;
    this.maxArea = maxArea;
}
From source file:com.davidmiguel.gobees.monitoring.camera.CameraFrame.java
License:Open Source License
/**
 * CameraFrame constructor.
 *
 * @param frame  frame Mat where to store the frame data.
 * @param width  frame width.
 * @param height frame height.
 */
CameraFrame(Mat frame, int width, int height) {
    super();
    this.width = width;
    this.height = height;
    yuvFrameData = frame;
    rgba = new Mat();
}
From source file:com.davidmiguel.gobees.monitoring.MonitoringPresenter.java
License:Open Source License
@Override
public void onCameraViewStarted(int width, int height) {
    processedFrame = new Mat();
    bc = AreaBeesCounter.getInstance();
    settingsView.initSettings();
}
From source file:com.dft.fingerwizardsampleapp.FingerWizardSample.java
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (FINGER_WIZARD_REQUEST_CODE == requestCode) {
        // If you want to display the preprocessed image on screen, put it in an ImageView
        // or similar
        ImageView preprocessedBitmapImageView =
                (ImageView) findViewById(R.id.preprocessed_bitmap_image_view);
        Bitmap processedBitmap = null;
        // Decode the bitmap from the file
        try {
            FileInputStream fis = new FileInputStream(mPreprocessedImageFile);
            InputStream buffer = new BufferedInputStream(fis);
            processedBitmap = BitmapFactory.decodeStream(buffer);
            buffer.close();
        } catch (IOException iox) {
            Log.e(TAG, "Cannot perform input of processedBitmapFile. " + iox);
        }
        // TODO: this is an example of how to show the preprocessedBitmap. In a complete
        // application, we'd need to handle screen orientation changes
        preprocessedBitmapImageView.setImageBitmap(processedBitmap);
        /**
         * It is possible to use Onyx's built-in image pyramiding as follows
         */
        if (processedBitmap != null) {
            double[] imageScales = new double[] { 0.8, 1.0, 1.2 }; // 80%, 100%, and 120%
            ArrayList<byte[]> scaledWSQs = new ArrayList<byte[]>();
            Mat mat = new Mat();
            Utils.bitmapToMat(processedBitmap, mat);
            Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2GRAY); // ensure image is grayscale
            MatVector vector = core.pyramidImage(mat, imageScales);
            for (int i = 0; i < imageScales.length; i++) {
                scaledWSQs.add(core.matToWsq(vector.get(i)));
            }
            for (int i = 0; i < scaledWSQs.size(); i++) {
                // TODO: send scaledWSQs.get(i) to server for matching...
                File inputFile = new File(Environment.getExternalStorageDirectory(),
                        "matToWsQ" + System.currentTimeMillis() / 1000 + ".wsq");
                try {
                    FileOutputStream fos = new FileOutputStream(inputFile.getPath());
                    fos.write(scaledWSQs.get(i));
                    fos.close();
                } catch (IOException e) {
                    Log.e(TAG, e.getMessage());
                }
            }
        }
        // Get the EnrollmentMetric
        EnrollmentMetric em = null;
        if (data != null && data.hasExtra(Consts.EXTRA_ENROLLMENT_METRIC)) {
            em = (EnrollmentMetric) data.getSerializableExtra(Consts.EXTRA_ENROLLMENT_METRIC);
        }
        // Get the finger location
        if (em != null) {
            String fingerLocation = em.getFingerLocation().toString();
            Log.d(TAG, "The fingerLocation, " + fingerLocation + ", is the String "
                    + "representation of the finger in the enum, EnumFinger.");
            // If you want a fingerprint template for enrollment that can be matched
            // using Onyx, get it in the following manner
            FingerprintTemplate ft = em.getFingerprintTemplateArray()[0];
            // The fingerprint template contains the NFIQ score of the pre-processed image
            // that was used to create it
            Log.d(TAG, "FingerprintTemplate NFIQ Score = " + ft.getNfiqScore());
            // The EnrollmentMetric also contains the NFIQ score
            int nfiqScore = em.getHighestNFIQScore();
            Log.d(TAG, "NFIQ Score = " + nfiqScore);
        }
    }
}
From source file:com.example.afs.makingmusic.process.CameraReader.java
License:Open Source License
@Override
public Frame process() throws InterruptedException {
    Mat image = new Mat();
    while (!camera.read(image)) {
        System.err.println("Cannot read image. Is the camera plugged in?");
        sleep(100);
        camera.release();
        camera = new VideoCapture(0);
    }
    Injector.getMetrics().setFrames(++frameCount);
    return new Frame(image);
}
From source file:com.example.afs.makingmusic.process.MotionDetector.java
License:Open Source License
public MotionDetector(BlockingQueue<Frame> inputQueue) {
    super(inputQueue);
    backgroundSubtractor = Video.createBackgroundSubtractorMOG2(5, 16, false);
    foregroundMask = new Mat();
}
From source file:com.example.afs.makingmusic.process.MotionDetector.java
License:Open Source License
@Override
public void process(Frame frame) {
    Mat image = frame.getImageMatrix();
    Core.flip(image, image, 1);
    backgroundSubtractor.apply(image, foregroundMask);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(foregroundMask.clone(), contours, new Mat(), Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_SIMPLE);
    Collections.shuffle(contours);
    int contourCount = contours.size();
    for (int contourIndex = 0; contourIndex < contourCount; contourIndex++) {
        MatOfPoint contour = contours.get(contourIndex);
        double contourArea = Imgproc.contourArea(contour);
        if (contourArea > MotionDetector.MINIMUM_AREA) {
            Rect item = Imgproc.boundingRect(contour);
            frame.addItem(item);
            itemCount++;
        }
    }
    Injector.getMetrics().setItems(itemCount);
}