List of usage examples for org.opencv.core.Mat.rows()
public int rows()
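Mat.rows() returns the number of rows in the matrix, i.e. the image height in pixels; paired with cols() it is the usual way to allocate a destination Mat with the same dimensions as a source image, which is the pattern most of the examples below use. A minimal sketch of that pattern (not taken from the examples below), assuming the OpenCV 2.4 Java bindings and an illustrative image path:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

public class MatRowsExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // The image paths are illustrative only.
        Mat source = Highgui.imread("/path/to/image.png", Highgui.CV_LOAD_IMAGE_COLOR);
        System.out.println("Size: " + source.rows() + " rows x " + source.cols() + " cols");

        // Allocate a destination Mat with the same row/column count and type as the source.
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        Imgproc.GaussianBlur(source, destination, new Size(5, 5), 0);
        Highgui.imwrite("/path/to/blurred.png", destination);
    }
}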
From source file:kamerka.Filters.java
public void erosion(String sourcePath, int size) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
    // destination has the same number of rows and columns as the source
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * size + 1, 2 * size + 1));
    Imgproc.erode(source, destination, element);
    Highgui.imwrite(sourcePath, destination);
}
From source file:kamerka.Filters.java
public void dilation(String sourcePath, int size) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat source = Highgui.imread(sourcePath, Highgui.CV_LOAD_IMAGE_COLOR);
    // destination has the same number of rows and columns as the source
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2 * size + 1, 2 * size + 1));
    Imgproc.dilate(source, destination, element);
    Highgui.imwrite(sourcePath, destination);
}
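For reference, a hypothetical call site for the two filters above, assuming Filters has a no-argument constructor; the file path is illustrative, and size controls the half-width of the rectangular structuring element (size = 2 gives a 5x5 kernel):

    // Hypothetical usage of the Filters helpers above; the path is illustrative only.
    Filters filters = new Filters();
    filters.erosion("/tmp/sample.png", 2);  // overwrites the file with its eroded version
    filters.dilation("/tmp/sample.png", 2); // then overwrites it again with the dilated result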
From source file:karthik.Barcode.Barcode.java
License:Open Source License
public static boolean updateImage(Barcode barcode, Mat img) {
    // used for video or camera feed when all images are the same size
    int orig_rows = barcode.img_details.src_original.rows();
    int orig_cols = barcode.img_details.src_original.cols();
    int new_rows = img.rows();
    int new_cols = img.cols();

    if ((orig_rows != new_rows) || (orig_cols != new_cols))
        return false;

    barcode.candidateBarcodes.clear();
    barcode.img_details.src_original = img;

    Imgproc.resize(barcode.img_details.src_original, barcode.img_details.src_scaled,
            barcode.img_details.src_scaled.size(), 0, 0, Imgproc.INTER_AREA);
    Imgproc.cvtColor(barcode.img_details.src_scaled, barcode.img_details.src_grayscale,
            Imgproc.COLOR_RGB2GRAY);
    return true;
}
From source file:karthik.Barcode.Barcode.java
License:Open Source License
protected Mat scale_candidateBarcode(Mat candidate) {
    // resizes candidate image to have at least MIN_COLS columns and MIN_ROWS rows
    // called when RESIZE_BEFORE_DECODE is set - seems to help ZXing decode barcode
    int MIN_COLS = 200;
    int MIN_ROWS = 200;

    int num_rows = candidate.rows();
    int num_cols = candidate.cols();

    if ((num_cols > MIN_COLS) && (num_rows > MIN_ROWS))
        return candidate;

    if (num_cols < MIN_COLS) {
        num_rows = (int) (num_rows * MIN_COLS / (1.0 * num_cols));
        num_cols = MIN_COLS;
    }

    if (num_rows < MIN_ROWS) {
        num_cols = (int) (num_cols * MIN_ROWS / (1.0 * num_rows));
        num_rows = MIN_ROWS;
    }

    Mat result = Mat.zeros(num_rows, num_cols, candidate.type());
    Imgproc.resize(candidate, result, result.size(), 0, 0, Imgproc.INTER_CUBIC);
    return result;
}
From source file:karthik.Barcode.CandidateMatrixBarcode.java
License:Open Source License
CandidateResult NormalizeCandidateRegion(double angle) {
    /* candidateRegion is the RotatedRect which contains a candidate region for the barcode
    // angle is the rotation angle or USE_ROTATED_RECT_ANGLE for this function to
    //   estimate rotation angle from the rect parameter
    // returns Mat containing cropped area (region of interest) with just the barcode
    // The barcode region is from the *original* image, not the scaled image
    // the cropped area is also rotated as necessary to be horizontal or vertical rather than skewed
    // Some parts of this function are from http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
    // and http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
    */
    double rotation_angle;
    CandidateResult result = new CandidateResult();

    // scale candidate region back up to original size to return cropped part from *original* image
    // need the 1.0 there to force floating-point arithmetic from int values
    double scale_factor = img_details.src_original.rows() / (1.0 * img_details.src_grayscale.rows());

    // expand the region found - this helps capture the entire code including the border zone
    candidateRegion.size.width += 2 * params.RECT_WIDTH;
    candidateRegion.size.height += 2 * params.RECT_HEIGHT;

    // calculate location of rectangle in original image and its corner points
    RotatedRect scaledRegion = new RotatedRect(candidateRegion.center, candidateRegion.size,
            candidateRegion.angle);
    scaledRegion.center.x = scaledRegion.center.x * scale_factor;
    scaledRegion.center.y = scaledRegion.center.y * scale_factor;
    scaledRegion.size.height *= scale_factor;
    scaledRegion.size.width *= scale_factor;

    scaledRegion.points(img_details.scaledCorners);
    // lets get the coordinates of the ROI in the original image and save it
    result.ROI_coords = Arrays.copyOf(img_details.scaledCorners, 4);

    // get the bounding rectangle of the ROI by sorting its corner points
    // we do it manually because RotatedRect can generate corner points outside the Mat area
    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_x_comparator());
    int leftCol = (int) img_details.scaledCorners[0].x;
    int rightCol = (int) img_details.scaledCorners[3].x;
    leftCol = (leftCol < 0) ? 0 : leftCol;
    rightCol = (rightCol > img_details.src_original.cols() - 1) ? img_details.src_original.cols() - 1
            : rightCol;

    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_y_comparator());
    int topRow = (int) img_details.scaledCorners[0].y;
    int bottomRow = (int) img_details.scaledCorners[3].y;
    topRow = (topRow < 0) ? 0 : topRow;
    bottomRow = (bottomRow > img_details.src_original.rows() - 1) ? img_details.src_original.rows() - 1
            : bottomRow;

    Mat ROI_region = img_details.src_original.submat(topRow, bottomRow, leftCol, rightCol);

    // create a container that is a square with side = diagonal of ROI.
    // this is large enough to accommodate the ROI region with rotation without cropping it
    int orig_rows = bottomRow - topRow;
    int orig_cols = rightCol - leftCol;
    int diagonal = (int) Math.sqrt(orig_rows * orig_rows + orig_cols * orig_cols);

    int newWidth = diagonal + 1;
    int newHeight = diagonal + 1;

    int offsetX = (newWidth - orig_cols) / 2;
    int offsetY = (newHeight - orig_rows) / 2;

    Mat enlarged_ROI_container = new Mat(newWidth, newHeight, img_details.src_original.type());
    enlarged_ROI_container.setTo(ZERO_SCALAR);

    // copy ROI to centre of container and rotate it
    ROI_region.copyTo(enlarged_ROI_container.rowRange(offsetY, offsetY + orig_rows).colRange(offsetX,
            offsetX + orig_cols));
    Point enlarged_ROI_container_centre = new Point(enlarged_ROI_container.rows() / 2.0,
            enlarged_ROI_container.cols() / 2.0);
    Mat rotated = Mat.zeros(enlarged_ROI_container.size(), enlarged_ROI_container.type());

    if (angle == Barcode.USE_ROTATED_RECT_ANGLE)
        rotation_angle = estimate_barcode_orientation();
    else
        rotation_angle = angle;

    // perform the affine transformation
    img_details.rotation_matrix = Imgproc.getRotationMatrix2D(enlarged_ROI_container_centre, rotation_angle,
            1.0);
    img_details.rotation_matrix.convertTo(img_details.rotation_matrix, CvType.CV_32F); // convert type so matrix multip. works properly

    img_details.newCornerCoord.setTo(ZERO_SCALAR);

    // convert scaledCorners to contain locations of corners in enlarged_ROI_container Mat
    img_details.scaledCorners[0] = new Point(offsetX, offsetY);
    img_details.scaledCorners[1] = new Point(offsetX, offsetY + orig_rows);
    img_details.scaledCorners[2] = new Point(offsetX + orig_cols, offsetY);
    img_details.scaledCorners[3] = new Point(offsetX + orig_cols, offsetY + orig_rows);

    // calculate the new location for each corner point of the rectangle ROI after rotation
    for (int r = 0; r < 4; r++) {
        img_details.coord.put(0, 0, img_details.scaledCorners[r].x);
        img_details.coord.put(1, 0, img_details.scaledCorners[r].y);
        Core.gemm(img_details.rotation_matrix, img_details.coord, 1, img_details.delta, 0,
                img_details.newCornerCoord);
        updatePoint(img_details.newCornerPoints.get(r), img_details.newCornerCoord.get(0, 0)[0],
                img_details.newCornerCoord.get(1, 0)[0]);
    }

    rotated.setTo(ZERO_SCALAR);
    Imgproc.warpAffine(enlarged_ROI_container, rotated, img_details.rotation_matrix,
            enlarged_ROI_container.size(), Imgproc.INTER_CUBIC);

    // sort rectangle points in order by first sorting all 4 points based on x
    // we then sort the first two based on y and then the next two based on y
    // this leaves the array in order top-left, bottom-left, top-right, bottom-right
    Collections.sort(img_details.newCornerPoints, CandidateBarcode.get_x_comparator());
    Collections.sort(img_details.newCornerPoints.subList(0, 2), CandidateBarcode.get_y_comparator());
    Collections.sort(img_details.newCornerPoints.subList(2, 4), CandidateBarcode.get_y_comparator());

    // calc height and width of rectangular region
    double height = length(img_details.newCornerPoints.get(1), img_details.newCornerPoints.get(0));
    double width = length(img_details.newCornerPoints.get(2), img_details.newCornerPoints.get(0));

    // create destination points for warpPerspective to map to
    updatePoint(img_details.transformedPoints.get(0), 0, 0);
    updatePoint(img_details.transformedPoints.get(1), 0, height);
    updatePoint(img_details.transformedPoints.get(2), width, 0);
    updatePoint(img_details.transformedPoints.get(3), width, height);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(
            Converters.vector_Point2f_to_Mat(img_details.newCornerPoints),
            Converters.vector_Point2f_to_Mat(img_details.transformedPoints));
    Mat perspectiveOut = Mat.zeros((int) height + 2, (int) width + 2, CvType.CV_32F);
    Imgproc.warpPerspective(rotated, perspectiveOut, perspectiveTransform, perspectiveOut.size(),
            Imgproc.INTER_CUBIC);

    result.ROI = perspectiveOut;
    return result;
}
From source file:karthiknr.TextID.ProcessAsyncActivity.java
License:Apache License
@Override
protected Bitmap doInBackground(Object... params) {
    try {
        if (params.length < 2) {
            Log.e(TAG, "Error passing parameter to execute - missing params");
            return null;
        }

        if (!(params[0] instanceof Context) || !(params[1] instanceof Bitmap)) {
            Log.e(TAG, "Error passing parameter to execute(context, bitmap)");
            return null;
        }

        context = (Context) params[0];
        bmp = (Bitmap) params[1];

        if (context == null || bmp == null) {
            Log.e(TAG, "Error passed null parameter to execute(context, bitmap)");
            return null;
        }

        Log.v(TAG, "Saving original bitmap");

        FileOutputStream out = null;
        try {
            out = new FileOutputStream(DATA_PATH + "/oocr.png");
            bmp.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        Log.v(TAG, "Starting Processing");

        // OpenCV warping
        Bitmap mutableBitmap = bmp.copy(Bitmap.Config.ARGB_8888, true);
        Mat imgSource = new Mat(mutableBitmap.getHeight(), mutableBitmap.getWidth(), CvType.CV_8UC1);
        Utils.bitmapToMat(mutableBitmap, imgSource);
        Mat startM = findWarpedMat(imgSource);

        Mat sourceImage = new Mat(mutableBitmap.getHeight(), mutableBitmap.getWidth(), CvType.CV_8UC1);
        Utils.bitmapToMat(mutableBitmap, sourceImage);
        Mat warpedMat = warpImage(sourceImage, startM);

        Bitmap resultBitmap = Bitmap.createBitmap(warpedMat.cols(), warpedMat.rows(),
                Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(warpedMat, resultBitmap);
        Log.v(TAG, "Got warped bitmap");

        Log.v(TAG, "Saving warped bitmap");
        out = null;
        try {
            out = new FileOutputStream(DATA_PATH + "/wocr.png");
            resultBitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (out != null) {
                    out.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        return resultBitmap;
    } catch (Exception ex) {
        Log.d(TAG, "Error: " + ex + "\n" + ex.getMessage());
    }
    return null;
}
From source file:LetsStart.utils.ImageProcessor.java
public BufferedImage toBufferedImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] buffer = new byte[bufferSize];
    matrix.get(0, 0, buffer); // get all the pixels
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, targetPixels, 0, buffer.length);
    return image;
}
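The helper above sizes both the intermediate byte buffer and the BufferedImage from rows() and cols(). A short usage sketch, assuming ImageProcessor has a no-argument constructor and using an illustrative image path, that displays the converted Mat in a Swing window:

import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import LetsStart.utils.ImageProcessor;

public class ShowMatExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // The image path is illustrative only.
        Mat matrix = Highgui.imread("/path/to/image.png", Highgui.CV_LOAD_IMAGE_COLOR);

        // Reuse the conversion helper shown above.
        BufferedImage image = new ImageProcessor().toBufferedImage(matrix);

        JFrame frame = new JFrame("Mat " + matrix.rows() + " x " + matrix.cols());
        frame.add(new JLabel(new ImageIcon(image)));
        frame.pack();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
    }
}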
From source file:LetsStart.utils.ImageViewer.java
public Image toBufferedImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] buffer = new byte[bufferSize];
    matrix.get(0, 0, buffer); // get all the pixels
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, targetPixels, 0, buffer.length);
    return image;
}
From source file:logic.featurepointextractor.EyeBrowsFPE.java
/**
 * getSkeleton obtains a thin 1-pixel region from a contour.
 * @param src input binary image
 * @return binary image
 */
private Mat getSkeleton(Mat src) {
    Mat skel = new Mat(src.rows(), src.cols(), CV_8UC1, new Scalar(0));
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    Mat tmp = new Mat();
    Mat eroded = new Mat();
    boolean done = false;

    do {
        Imgproc.morphologyEx(src, eroded, Imgproc.MORPH_ERODE, element);
        Imgproc.morphologyEx(eroded, tmp, Imgproc.MORPH_DILATE, element);
        Core.subtract(src, tmp, tmp);
        Core.bitwise_or(skel, tmp, skel);
        eroded.copyTo(src);

        done = (Core.countNonZero(src) == 0);
    } while (!done);

    return skel;
}
From source file:logic.featurepointextractor.MouthFPE.java
/**
 * Detect mouth feature points.
 * Algorithm: Equalize histogram of mouth rect
 *            Implement Sobel horizontal filter
 *            Find corners
 *            Invert color + Binarization
 *            Find lip up and down points
 * @param mc
 * @return
 */
@Override
public Point[] detect(MatContainer mc) {
    /** Algorithm
     * find pix(i) = (R-G)/R
     * normalize: 2*arctan(pix(i))/pi
     */

    // find pix(i) = (R-G)/R
    Mat mouthRGBMat = mc.origFrame.submat(mc.mouthRect);

    List mouthSplitChannelsList = new ArrayList<Mat>();
    Core.split(mouthRGBMat, mouthSplitChannelsList);

    // extract R-channel
    Mat mouthR = (Mat) mouthSplitChannelsList.get(2);
    mouthR.convertTo(mouthR, CvType.CV_64FC1);

    // extract G-channel
    Mat mouthG = (Mat) mouthSplitChannelsList.get(1);
    mouthG.convertTo(mouthG, CvType.CV_64FC1);

    // calculate (R-G)/R
    Mat dst = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);
    mc.mouthProcessedMat = new Mat(mouthR.rows(), mouthR.cols(), CvType.CV_64FC1);

    Core.absdiff(mouthR, mouthG, dst);
    // Core.divide(dst, mouthR, mc.mouthProcessedMat);
    mc.mouthProcessedMat = dst;

    mc.mouthProcessedMat.convertTo(mc.mouthProcessedMat, CvType.CV_8UC1);
    Imgproc.equalizeHist(mc.mouthProcessedMat, mc.mouthProcessedMat);
    // Imgproc.blur(mc.mouthProcessedMat, mc.mouthProcessedMat, new Size(4, 4));
    // Imgproc.morphologyEx(mc.mouthProcessedMat, mc.mouthProcessedMat, Imgproc.MORPH_OPEN,
    //         Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(4, 4)));
    Imgproc.threshold(mc.mouthProcessedMat, mc.mouthProcessedMat, 230, 255, THRESH_BINARY);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(mc.mouthProcessedMat, contours, new Mat(), Imgproc.RETR_TREE,
            Imgproc.CHAIN_APPROX_SIMPLE);

    // find the biggest contour
    int maxSize = -1;
    int tmpSize = -1;
    int index = -1;

    Rect centMouthRect = new Rect(mc.mouthRect.x + mc.mouthRect.width / 4,
            mc.mouthRect.y + mc.mouthRect.height / 4, mc.mouthRect.width / 2, mc.mouthRect.height / 2);

    if (contours.size() != 0) {
        maxSize = contours.get(0).toArray().length;
        tmpSize = 0;
        index = 0;
    }

    // find max contour
    for (int j = 0; j < contours.size(); ++j) {
        // if contour is vertical, exclude it
        Rect boundRect = Imgproc.boundingRect(contours.get(j));
        int centX = mc.mouthRect.x + boundRect.x + boundRect.width / 2;
        int centY = mc.mouthRect.y + boundRect.y + boundRect.height / 2;
        // LOG.info("Center = " + centX + "; " + centY);
        // LOG.info("Rect = " + centMouthRect.x + "; " + centMouthRect.y);

        if (!centMouthRect.contains(new Point(centX, centY)))
            continue;

        tmpSize = contours.get(j).toArray().length;
        LOG.info("Contour " + j + "; size = " + tmpSize);
        if (tmpSize > maxSize) {
            maxSize = tmpSize;
            index = j;
        }
    }

    // approximate curve
    Point[] p1 = contours.get(index).toArray();
    MatOfPoint2f p2 = new MatOfPoint2f(p1);
    MatOfPoint2f p3 = new MatOfPoint2f();
    Imgproc.approxPolyDP(p2, p3, 1, true);
    p1 = p3.toArray();

    MatOfInt tmpMatOfPoint = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(p1), tmpMatOfPoint);

    Rect boundRect = Imgproc.boundingRect(new MatOfPoint(p1));
    if (boundRect.area() / mc.mouthRect.area() > 0.3)
        return null;

    int size = (int) tmpMatOfPoint.size().height;
    Point[] _p1 = new Point[size];
    int[] a = tmpMatOfPoint.toArray();

    _p1[0] = new Point(p1[a[0]].x + mc.mouthRect.x, p1[a[0]].y + mc.mouthRect.y);
    Core.circle(mc.origFrame, _p1[0], 3, new Scalar(0, 0, 255), -1);

    for (int i = 1; i < size; i++) {
        _p1[i] = new Point(p1[a[i]].x + mc.mouthRect.x, p1[a[i]].y + mc.mouthRect.y);
        Core.circle(mc.origFrame, _p1[i], 3, new Scalar(0, 0, 255), -1);
        Core.line(mc.origFrame, _p1[i - 1], _p1[i], new Scalar(255, 0, 0), 2);
    }
    Core.line(mc.origFrame, _p1[size - 1], _p1[0], new Scalar(255, 0, 0), 2);

    /*
    contours.set(index, new MatOfPoint(_p1));
    mc.mouthProcessedMat.setTo(new Scalar(0));
    Imgproc.drawContours(mc.mouthProcessedMat, contours, index, new Scalar(255), -1);
    */

    mc.mouthMatOfPoint = _p1;

    MatOfPoint matOfPoint = new MatOfPoint(_p1);
    mc.mouthBoundRect = Imgproc.boundingRect(matOfPoint);
    mc.features.mouthBoundRect = mc.mouthBoundRect;

    /** extract feature points: 1 most left
     *  2 most right
     *  3,4 up
     *  5,6 down
     */
    // mc.mouthMatOfPoint = extractFeaturePoints(contours.get(index));

    return null;
}