List of usage examples for org.opencv.core.Mat: the Mat() constructor
public Mat()
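The default constructor creates an empty Mat header with no allocated pixel data. Most OpenCV Java methods accept such a Mat as an output argument and allocate it to the required size and type themselves, which is exactly how the examples below use it. A minimal sketch of that pattern (class name and values are illustrative):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class EmptyMatDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // new Mat() allocates no data; the size is 0x0 until an operation fills it.
        Mat empty = new Mat();
        System.out.println("before: " + empty.size());

        // Output arguments are (re)allocated by the call itself.
        Mat src = new Mat(4, 4, CvType.CV_8UC1, new Scalar(200));
        Core.bitwise_not(src, empty); // empty is now a 4x4 CV_8UC1 image
        System.out.println("after: " + empty.size() + "\n" + empty.dump());
    }
}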
From source file:classes.TextRecognitionPreparer.java
public static ArrayList<BufferedImage> generateRecognizableBufferedImages(Mat img, Scalar backgroundColor,
        Scalar userPickedColor) {
    ArrayList<BufferedImage> images = new ArrayList<BufferedImage>();

    // Flood-fill the background from the top-left corner with the picked color
    // (the Scalar components are reversed into OpenCV's BGR channel order).
    Mat filledImage = img.clone();
    Scalar newVal = new Scalar(userPickedColor.val[2], userPickedColor.val[1], userPickedColor.val[0]);
    Imgproc.floodFill(filledImage, new Mat(), new Point(0, 0), newVal);
    images.add(Util.mat2Img(filledImage));

    Mat filledGrayImage = new Mat();
    Imgproc.cvtColor(filledImage, filledGrayImage, Imgproc.COLOR_BGR2GRAY);
    images.add(Util.mat2Img(filledGrayImage));

    // Unsharp masking: subtract a Gaussian-blurred copy to sharpen the gray image.
    Mat gaussianGrayImage = new Mat();
    Imgproc.GaussianBlur(filledGrayImage, gaussianGrayImage, new Size(0, 0), 3);
    Core.addWeighted(filledGrayImage, 3.5, gaussianGrayImage, -1, 0, gaussianGrayImage);
    images.add(Util.mat2Img(gaussianGrayImage));

    // Two adaptive-threshold binarizations with different neighborhood sizes (75 vs. 15).
    Mat filledBinarizedImage2 = new Mat();
    Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage2, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 75, 10);
    images.add(Util.mat2Img(filledBinarizedImage2));

    Mat filledBinarizedImage1 = new Mat();
    Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage1, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 15, 4);
    images.add(Util.mat2Img(filledBinarizedImage1));

    return images;
}
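Util.mat2Img is not part of this listing. A plausible reconstruction, assuming the Mat holds 8-bit single-channel gray or 3-channel BGR pixels, copies the raw bytes into a BufferedImage of matching layout:

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import org.opencv.core.Mat;

public class Util {
    // Hypothetical reconstruction of the mat2Img helper used above.
    public static BufferedImage mat2Img(Mat mat) {
        int type = mat.channels() == 1 ? BufferedImage.TYPE_BYTE_GRAY
                                       : BufferedImage.TYPE_3BYTE_BGR;
        BufferedImage img = new BufferedImage(mat.cols(), mat.rows(), type);
        byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
        mat.get(0, 0, data); // bulk-copy all pixel bytes out of the Mat
        return img;
    }
}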
From source file:classes.TextRecognitionPreparer.java
public static ArrayList<Mat> generateRecognizableImages(Mat img, Scalar backgroundColor, Scalar userPickedColor) {
    ArrayList<Mat> images = new ArrayList<Mat>();

    // Variant 1: flood-fill the background with the picked color (reversed into BGR order).
    Mat filledImage = img.clone();
    Scalar newVal = new Scalar(userPickedColor.val[2], userPickedColor.val[1], userPickedColor.val[0]);
    Imgproc.floodFill(filledImage, new Mat(), new Point(0, 0), newVal);
    String file1 = "filledImage.png";
    // Highgui.imwrite(file1, filledImage);
    images.add(filledImage);

    // Variant 2: grayscale conversion of the filled image.
    Mat filledGrayImage = new Mat();
    Imgproc.cvtColor(filledImage, filledGrayImage, Imgproc.COLOR_BGR2GRAY);
    String file2 = "filledGrayImage.png";
    // Highgui.imwrite(file2, filledGrayImage);
    images.add(filledGrayImage);

    // Variant 3: unsharp masking to sharpen the gray image.
    Mat gaussianGrayImage = new Mat();
    Imgproc.GaussianBlur(filledGrayImage, gaussianGrayImage, new Size(0, 0), 3);
    Core.addWeighted(filledGrayImage, 3.5, gaussianGrayImage, -1, 0, gaussianGrayImage);
    // Core.addWeighted(filledGrayImage, 2.5, gaussianGrayImage, -0.5, 0, gaussianGrayImage);
    String file3 = "sharpenedImage.png";
    // Highgui.imwrite(file3, gaussianGrayImage);
    images.add(gaussianGrayImage);

    // Variant 4: adaptive binarization of the gray image.
    Mat filledBinarizedImage = new Mat();
    Imgproc.adaptiveThreshold(filledGrayImage, filledBinarizedImage, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 15, 4);
    String file4 = "filledBinarizedImage.png";
    // Highgui.imwrite(file4, filledBinarizedImage);
    images.add(filledBinarizedImage);

    // BackgroundSubtractorMOG2 backgroundSubtractorMOG2 = new BackgroundSubtractorMOG2();
    // Mat foregroundMask = new Mat();
    // backgroundSubtractorMOG2.apply(img, foregroundMask);
    // Highgui.imwrite("mFGMask.png", foregroundMask);

    // Variants 5-10: replace the background color with a k-means-derived filling
    // color, then derive gray, binarized, equalized, and color-reduced versions.
    Scalar fillingColor = cluster(userPickedColor, img, 3);
    Mat replacedColor = replaceColor(img, backgroundColor, fillingColor);
    String file5 = "replacedColor.png";
    // Highgui.imwrite(file5, replacedColor);
    images.add(replacedColor);

    Mat grayImage = new Mat();
    Imgproc.cvtColor(replacedColor, grayImage, Imgproc.COLOR_BGR2GRAY);
    String file6 = "grayImage.png";
    // Highgui.imwrite(file6, grayImage);
    images.add(grayImage);

    Mat binarized = new Mat();
    Imgproc.adaptiveThreshold(grayImage, binarized, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, 15, 4);
    String file7 = "binarized.png";
    // Highgui.imwrite(file7, binarized);
    images.add(binarized);

    Mat colorReplacedEqualized = equalizeIntensity(replacedColor);
    String file8 = "colorReplacedEqualized.png";
    // Highgui.imwrite(file8, colorReplacedEqualized);
    images.add(colorReplacedEqualized);

    Mat colorReducedImage = reduceColor(replacedColor, 64);
    String file9 = "replacedColorColorReduced.png";
    // Highgui.imwrite(file9, colorReducedImage);
    images.add(colorReducedImage);

    // Equalizing image
    Mat colorReducedEqualized = equalizeIntensity(colorReducedImage);
    String file10 = "colorReducedEqualized.png";
    // Highgui.imwrite(file10, colorReducedEqualized);
    images.add(colorReducedEqualized);

    return images;
}
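replaceColor and reduceColor are private helpers of TextRecognitionPreparer that this listing does not include (cluster and equalizeIntensity appear below). As an illustration of what reduceColor(replacedColor, 64) plausibly does, here is a sketch of uniform color quantization; the helper name matches the call above, but the bucket-midpoint convention is an assumption:

import org.opencv.core.Mat;

public class ColorReduction {
    // Hypothetical sketch of the reduceColor helper: snap every channel value
    // to the midpoint of its div-wide bucket (e.g. div = 64 leaves four levels
    // per channel).
    public static Mat reduceColor(Mat image, int div) {
        Mat result = image.clone();
        byte[] pixel = new byte[result.channels()];
        for (int r = 0; r < result.rows(); r++) {
            for (int c = 0; c < result.cols(); c++) {
                result.get(r, c, pixel);
                for (int k = 0; k < pixel.length; k++) {
                    int v = pixel[k] & 0xFF;
                    pixel[k] = (byte) (v / div * div + div / 2);
                }
                result.put(r, c, pixel);
            }
        }
        return result;
    }
}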
From source file:classes.TextRecognitionPreparer.java
public static Scalar cluster(Scalar userColor, Mat cutout, int k) {
    // Reshape so every pixel becomes one 3-value row, then scale to [0, 1]
    // floats as required by Core.kmeans.
    Mat samples = cutout.reshape(1, cutout.cols() * cutout.rows());
    Mat samples32f = new Mat();
    samples.convertTo(samples32f, CvType.CV_32F, 1.0 / 255.0);

    Mat labels = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 100, 1);
    Mat centers = new Mat();
    Core.kmeans(samples32f, k, labels, criteria, 1, Core.KMEANS_PP_CENTERS, centers);

    return getFillingColor(userColor, cutout, labels, centers);
}
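getFillingColor is not shown in this listing. Given the kmeans call above, a natural implementation returns the cluster center closest to the user-picked color; the sketch below assumes that behavior and uses a simplified signature (the real helper also receives the cutout and the label matrix):

import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class FillingColorSketch {
    // Hypothetical: pick the k-means center nearest the user color.
    // centers is k x 3, CV_32F, scaled to [0, 1] (matching convertTo above).
    public static Scalar getFillingColor(Scalar userColor, Mat centers) {
        double bestDist = Double.MAX_VALUE;
        Scalar best = userColor;
        for (int i = 0; i < centers.rows(); i++) {
            double b = centers.get(i, 0)[0] * 255.0;
            double g = centers.get(i, 1)[0] * 255.0;
            double r = centers.get(i, 2)[0] * 255.0;
            double dist = (b - userColor.val[0]) * (b - userColor.val[0])
                    + (g - userColor.val[1]) * (g - userColor.val[1])
                    + (r - userColor.val[2]) * (r - userColor.val[2]);
            if (dist < bestDist) {
                bestDist = dist;
                best = new Scalar(b, g, r);
            }
        }
        return best;
    }
}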
From source file:classes.TextRecognitionPreparer.java
public static Mat equalizeIntensity(Mat inputImage) {
    if (inputImage.channels() >= 3) {
        // Equalize only the luminance channel (note: the variable is named
        // ycrcb, but the conversion actually used is BGR <-> YUV).
        Mat ycrcb = new Mat();
        Imgproc.cvtColor(inputImage, ycrcb, Imgproc.COLOR_BGR2YUV);

        ArrayList<Mat> channels = new ArrayList<Mat>();
        Core.split(ycrcb, channels);

        Mat equalized = new Mat();
        Imgproc.equalizeHist(channels.get(0), equalized);
        channels.set(0, equalized);

        Core.merge(channels, ycrcb);
        Mat result = new Mat();
        Imgproc.cvtColor(ycrcb, result, Imgproc.COLOR_YUV2BGR);
        return result;
    }
    return null;
}
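A minimal usage sketch for equalizeIntensity (file paths are illustrative; OpenCV 2.4 Highgui API, matching most examples in this listing):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import classes.TextRecognitionPreparer;

public class EqualizeDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat input = Highgui.imread("photo.jpg"); // loaded as 3-channel BGR
        Mat equalized = TextRecognitionPreparer.equalizeIntensity(input);
        if (equalized != null) { // null is returned for images with fewer than 3 channels
            Highgui.imwrite("photo_equalized.jpg", equalized);
        }
    }
}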
From source file:cmib_4_4.Countour.java
public static void main(String args[]) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat image = Highgui.imread("input1.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat image1 = Highgui.imread("input1.jpg", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat image4 = Highgui.imread("input1.jpg");

    Imgproc.threshold(image1, image1, 0, 255, THRESH_OTSU);
    // Note: the Canny threshold arguments here are the numeric values of the
    // THRESH_BINARY_INV + THRESH_OTSU flag constants, as in the original source.
    Imgproc.Canny(image1, image1, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU,
            Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);

    // The flood-fill mask must be 2 pixels larger than the image in each dimension.
    Mat image2 = Mat.zeros(image.rows() + 2, image.cols() + 2, CV_8U);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(image1, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    for (int i = 0; i < contours.size(); i++) {
        if (Imgproc.contourArea(contours.get(i)) > 100) {
            Rect rect = Imgproc.boundingRect(contours.get(i));
            Imgproc.floodFill(image1, image2, new Point(150, 150), new Scalar(255));

            // Crop the bounding box out of the color image and save it.
            Rect rectCrop = new Rect(rect.x, rect.y, rect.width, rect.height);
            Mat image_roi_rgb = new Mat(image4, rectCrop);
            Highgui.imwrite("crop2.jpg", image_roi_rgb);

            if (rect.height > 28) {
                Core.rectangle(image, new Point(rect.x, rect.y),
                        new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
            }
        }
    }

    Highgui.imwrite("falciparum2.jpg", image);
}
From source file:cmib_4_4.Feature.java
public int[] featureVector(Mat image1) {
    // Normalize every input to a 30 x 30 image (900 pixels).
    Mat image = new Mat();
    Size sz = new Size(30, 30);
    Imgproc.resize(image1, image, sz);

    int size = (int) (image1.total() * image1.channels());
    int size2 = (image.width() * image.height());
    double[][] spec1 = new double[size2][3];
    FeatureVector A = new FeatureVector();

    // Convert each pixel's RGB triple to spherical coordinates and normalize
    // the components by 90 degrees (in radians) so they fall in [0, 1].
    int k = 0;
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            double[] rgb = image.get(i, j);
            double[] a = A.cartToSpec(rgb[0], rgb[1], rgb[2]);
            double x = Math.toRadians(90);
            spec1[k][0] = a[0] / x;
            spec1[k][1] = a[1] / x;
            spec1[k][2] = a[2] / x;
            k++;
        }
    }

    // Accumulate an 11 x 11 2D histogram over the two angular components.
    int[][] b = new int[11][11];
    for (int i = 0; i < 11; i++) {
        for (int j = 0; j < 11; j++) {
            b[i][j] = 0;
        }
    }
    for (int i = 0; i < 900; i++) { // 900 = 30 * 30 pixels
        int x1 = (int) (Math.round(spec1[i][1] * 10));
        int y1 = (int) (Math.round(spec1[i][2] * 10));
        b[x1][y1] = b[x1][y1] + 1;
    }

    // Flatten the histogram into a 121-element feature vector.
    int l = 0;
    int[] c = new int[121];
    for (int i = 0; i < 11; i++) {
        for (int j = 0; j < 11; j++) {
            c[l] = b[i][j];
            l++;
        }
    }
    return c;
}
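FeatureVector.cartToSpec is not included in this listing. Given that its two angular outputs are normalized by 90 degrees and binned into [0, 10] above, a plausible reconstruction maps an RGB triple to spherical coordinates, whose angles stay in [0, pi/2] because the components are non-negative; this is an assumption, not the original helper:

public class FeatureVectorSketch {
    // Hypothetical sketch of cartToSpec: Cartesian (r, g, b) to spherical
    // (magnitude, inclination, azimuth).
    public double[] cartToSpec(double r, double g, double b) {
        double rho = Math.sqrt(r * r + g * g + b * b);       // magnitude
        double theta = rho > 0 ? Math.acos(b / rho) : 0;     // inclination in [0, pi/2]
        double phi = Math.atan2(g, r);                       // azimuth in [0, pi/2]
        return new double[] { rho, theta, phi };
    }
}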
From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java
License:Apache License
/**
 * Analyze video frames using a computer vision approach and generate an
 * ArrayList<AttitudeRec>.
 *
 * @param recs output ArrayList of AttitudeRec
 * @return total number of frames in the video
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;
    boolean use_timestamp = true;

    // roughly determine if decimation is necessary
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(new File(mPath, "video.mp4"), decimation);

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();
    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);
    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();
    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();
    long[] ts = new long[1];

    while ((frame = videoDecoder.getFrame(ts)) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        if (use_timestamp && ts[0] == -1) {
            use_timestamp = false;
        }

        // has to be in front, as there are cases where execution
        // will skip the later part of this while
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern = Calib3d.findCirclesGrid(gray, patternSize, centers,
                Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using real ground-truth 3D points and the pixel
        // positions of blobs found by findCirclesGrid; an estimated camera matrix and
        // no distortion are assumed
        boolean foundSolution = Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject points to evaluate the accuracy of the solvePnP result
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // If the error is reasonable, add the frame to the results. The threshold
        // is a ratio of the frame height to avoid discriminating against
        // higher-definition videos.
        if (error < REPROJECTION_THREASHOLD_RATIO * frameSize.height) {
            double[] rv = new double[3];
            double timestamp;

            rvec.get(0, 0, rv);
            if (use_timestamp) {
                timestamp = (double) ts[0] / 1e6;
            } else {
                timestamp = (double) i / meta.fps;
            }
            if (LOCAL_LOGV)
                Log.v(TAG, String.format("Added frame %d ts = %f", i, timestamp));
            recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Imgcodecs.imwrite(Environment.getExternalStorageDirectory().getPath()
                    + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
        }
    }

    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}
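cameraMatrix(fc, center) is a private helper of the analyzer that this listing omits. The conventional construction is a 3x3 pinhole intrinsic matrix with equal focal lengths, no skew, and the principal point at the given center; the sketch below assumes exactly that:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class IntrinsicsSketch {
    // Hypothetical reconstruction of the cameraMatrix helper.
    public static Mat cameraMatrix(float f, Size center) {
        Mat k = Mat.eye(3, 3, CvType.CV_64F);
        k.put(0, 0, f);             // fx
        k.put(1, 1, f);             // fy
        k.put(0, 2, center.width);  // cx
        k.put(1, 2, center.height); // cy
        return k;
    }
}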
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/**
 * Invert image.
 *
 * @param src - source image.
 * @return inverted image.
 */
public static Mat invert(Mat src) {
    Mat dest = new Mat();
    Core.bitwise_not(src, dest);
    return dest;
}
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/**
 * Applies binary thresholding to a grayscale image.
 *
 * @param src - grayscale source image.
 * @param thresh - the threshold level.
 * @return thresholded grayscale image.
 */
public static Mat threshold(Mat src, int thresh) {
    Mat dest = new Mat();
    Imgproc.threshold(src, dest, thresh, 255, Imgproc.THRESH_BINARY);
    return dest;
}
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/**
 * Applies thresholding to a color image.
 *
 * @param src - color source image.
 * @param r - the red value used in the threshold.
 * @param g - the green value used in the threshold.
 * @param b - the blue value used in the threshold.
 * @return thresholded color image.
 */
public static Mat threshold(Mat src, int r, int g, int b) {
    if (src.channels() < 3)
        return src;

    Mat dest = new Mat();
    Mat srcBin = new Mat();

    // Binary mask of the source (non-zero pixels become 255).
    Imgproc.threshold(src, srcBin, 1, 255, Imgproc.THRESH_BINARY);

    // Select pixels whose channels fall inside [0, (r, g, b)], then invert
    // the mask and expand it back to 3 channels.
    Core.inRange(src, new Scalar(0), new Scalar(r, g, b), dest);
    dest = invert(dest);
    cvtColor(dest, dest, Imgproc.COLOR_GRAY2BGR);

    dest = xor(srcBin, dest);
    dest = and(src, dest);
    return dest;
}
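The xor and and helpers are defined elsewhere in CoreOperations. Read against the pipeline above, they presumably wrap Core.bitwise_xor and Core.bitwise_and; a sketch under that assumption:

import org.opencv.core.Core;
import org.opencv.core.Mat;

public class BitwiseSketch {
    // Hypothetical reconstruction of CoreOperations.xor.
    public static Mat xor(Mat first, Mat second) {
        Mat dest = new Mat();
        Core.bitwise_xor(first, second, dest);
        return dest;
    }

    // Hypothetical reconstruction of CoreOperations.and.
    public static Mat and(Mat first, Mat second) {
        Mat dest = new Mat();
        Core.bitwise_and(first, second, dest);
        return dest;
    }
}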