List of usage examples for org.opencv.core Mat size
public Size size()
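Mat.size() returns a Size value object whose width and height fields mirror cols() and rows(); modifying the returned Size does not resize the Mat itself. Before the project examples below, here is a minimal self-contained sketch of the call (it assumes the OpenCV Java bindings are on the classpath and the native library is loadable via Core.NATIVE_LIBRARY_NAME):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;

public class MatSizeExample {
    public static void main(String[] args) {
        // load the OpenCV native library before creating any Mat
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // a 480-row by 640-column, 3-channel 8-bit matrix
        Mat mat = Mat.zeros(480, 640, CvType.CV_8UC3);

        // size() returns a Size: width == cols(), height == rows()
        Size sz = mat.size();
        System.out.println("width = " + sz.width);   // 640.0
        System.out.println("height = " + sz.height); // 480.0

        // the returned Size is a plain value object; changing it does not resize the Mat
        sz.width = 10;
        System.out.println(mat.cols());              // still 640
    }
}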
From source file:javafx1.JavaFX1.java
/**
 * Get the average hue value of the image starting from its Hue channel histogram.
 *
 * @param hsvImg    the current frame in HSV
 * @param hueValues the Hue component of the current frame
 * @return the average Hue value
 */
private double getHistAverage(Mat hsvImg, Mat hueValues) {
    // init
    double average = 0.0;
    Mat hist_hue = new Mat();
    // 0-180: range of Hue values
    MatOfInt histSize = new MatOfInt(180);
    List<Mat> hue = new ArrayList<>();
    hue.add(hueValues);

    // compute the histogram
    Imgproc.calcHist(hue, new MatOfInt(0), new Mat(), hist_hue, histSize, new MatOfFloat(0, 179));

    // get the average Hue value of the image:
    // (sum(bin(h) * h)) / (image-height * image-width)
    // equivalent to getting the hue of each pixel, adding them up,
    // and dividing by the image size (height * width)
    for (int h = 0; h < 180; h++) {
        // for each bin, get its value and multiply it by the corresponding hue
        average += (hist_hue.get(h, 0)[0] * h);
    }

    // return the average hue of the image
    return average / (hsvImg.size().height * hsvImg.size().width);
}
From source file:karthik.Barcode.Barcode.java
License:Open Source License
protected Mat scale_candidateBarcode(Mat candidate) {
    // resizes candidate image to have at least MIN_COLS columns and MIN_ROWS rows
    // called when RESIZE_BEFORE_DECODE is set - seems to help ZXing decode the barcode
    int MIN_COLS = 200;
    int MIN_ROWS = 200;

    int num_rows = candidate.rows();
    int num_cols = candidate.cols();

    if ((num_cols > MIN_COLS) && (num_rows > MIN_ROWS))
        return candidate;

    if (num_cols < MIN_COLS) {
        num_rows = (int) (num_rows * MIN_COLS / (1.0 * num_cols));
        num_cols = MIN_COLS;
    }

    if (num_rows < MIN_ROWS) {
        num_cols = (int) (num_cols * MIN_ROWS / (1.0 * num_rows));
        num_rows = MIN_ROWS;
    }

    Mat result = Mat.zeros(num_rows, num_cols, candidate.type());
    Imgproc.resize(candidate, result, result.size(), 0, 0, Imgproc.INTER_CUBIC);
    return result;
}
From source file:karthik.Barcode.CandidateMatrixBarcode.java
License:Open Source License
CandidateResult NormalizeCandidateRegion(double angle) {
    /* candidateRegion is the RotatedRect which contains a candidate region for the barcode
       angle is the rotation angle, or USE_ROTATED_RECT_ANGLE for this function to
       estimate the rotation angle from the rect parameter
       returns Mat containing the cropped area (region of interest) with just the barcode
       The barcode region is from the *original* image, not the scaled image
       the cropped area is also rotated as necessary to be horizontal or vertical rather than skewed
       Some parts of this function are from http://felix.abecassis.me/2011/10/opencv-rotation-deskewing/
       and http://stackoverflow.com/questions/22041699/rotate-an-image-without-cropping-in-opencv-in-c
    */

    double rotation_angle;
    CandidateResult result = new CandidateResult();

    // scale candidate region back up to original size to return cropped part from *original* image
    // need the 1.0 there to force floating-point arithmetic from int values
    double scale_factor = img_details.src_original.rows() / (1.0 * img_details.src_grayscale.rows());

    // expand the region found - this helps capture the entire code including the border zone
    candidateRegion.size.width += 2 * params.RECT_WIDTH;
    candidateRegion.size.height += 2 * params.RECT_HEIGHT;

    // calculate location of rectangle in original image and its corner points
    RotatedRect scaledRegion = new RotatedRect(candidateRegion.center, candidateRegion.size,
            candidateRegion.angle);
    scaledRegion.center.x = scaledRegion.center.x * scale_factor;
    scaledRegion.center.y = scaledRegion.center.y * scale_factor;
    scaledRegion.size.height *= scale_factor;
    scaledRegion.size.width *= scale_factor;

    scaledRegion.points(img_details.scaledCorners);
    // lets get the coordinates of the ROI in the original image and save it
    result.ROI_coords = Arrays.copyOf(img_details.scaledCorners, 4);

    // get the bounding rectangle of the ROI by sorting its corner points
    // we do it manually because RotatedRect can generate corner points outside the Mat area
    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_x_comparator());
    int leftCol = (int) img_details.scaledCorners[0].x;
    int rightCol = (int) img_details.scaledCorners[3].x;
    leftCol = (leftCol < 0) ? 0 : leftCol;
    rightCol = (rightCol > img_details.src_original.cols() - 1) ? img_details.src_original.cols() - 1
            : rightCol;

    Arrays.sort(img_details.scaledCorners, CandidateBarcode.get_y_comparator());
    int topRow = (int) img_details.scaledCorners[0].y;
    int bottomRow = (int) img_details.scaledCorners[3].y;
    topRow = (topRow < 0) ? 0 : topRow;
    bottomRow = (bottomRow > img_details.src_original.rows() - 1) ? img_details.src_original.rows() - 1
            : bottomRow;

    Mat ROI_region = img_details.src_original.submat(topRow, bottomRow, leftCol, rightCol);

    // create a container that is a square with side = diagonal of ROI.
    // this is large enough to accommodate the ROI region with rotation without cropping it
    int orig_rows = bottomRow - topRow;
    int orig_cols = rightCol - leftCol;
    int diagonal = (int) Math.sqrt(orig_rows * orig_rows + orig_cols * orig_cols);

    int newWidth = diagonal + 1;
    int newHeight = diagonal + 1;

    int offsetX = (newWidth - orig_cols) / 2;
    int offsetY = (newHeight - orig_rows) / 2;

    Mat enlarged_ROI_container = new Mat(newWidth, newHeight, img_details.src_original.type());
    enlarged_ROI_container.setTo(ZERO_SCALAR);

    // copy ROI to centre of container and rotate it
    ROI_region.copyTo(enlarged_ROI_container.rowRange(offsetY, offsetY + orig_rows).colRange(offsetX,
            offsetX + orig_cols));
    Point enlarged_ROI_container_centre = new Point(enlarged_ROI_container.rows() / 2.0,
            enlarged_ROI_container.cols() / 2.0);
    Mat rotated = Mat.zeros(enlarged_ROI_container.size(), enlarged_ROI_container.type());

    if (angle == Barcode.USE_ROTATED_RECT_ANGLE)
        rotation_angle = estimate_barcode_orientation();
    else
        rotation_angle = angle;

    // perform the affine transformation
    img_details.rotation_matrix = Imgproc.getRotationMatrix2D(enlarged_ROI_container_centre, rotation_angle,
            1.0);
    // convert type so matrix multiplication works properly
    img_details.rotation_matrix.convertTo(img_details.rotation_matrix, CvType.CV_32F);

    img_details.newCornerCoord.setTo(ZERO_SCALAR);

    // convert scaledCorners to contain locations of corners in the enlarged_ROI_container Mat
    img_details.scaledCorners[0] = new Point(offsetX, offsetY);
    img_details.scaledCorners[1] = new Point(offsetX, offsetY + orig_rows);
    img_details.scaledCorners[2] = new Point(offsetX + orig_cols, offsetY);
    img_details.scaledCorners[3] = new Point(offsetX + orig_cols, offsetY + orig_rows);

    // calculate the new location for each corner point of the rectangular ROI after rotation
    for (int r = 0; r < 4; r++) {
        img_details.coord.put(0, 0, img_details.scaledCorners[r].x);
        img_details.coord.put(1, 0, img_details.scaledCorners[r].y);
        Core.gemm(img_details.rotation_matrix, img_details.coord, 1, img_details.delta, 0,
                img_details.newCornerCoord);
        updatePoint(img_details.newCornerPoints.get(r), img_details.newCornerCoord.get(0, 0)[0],
                img_details.newCornerCoord.get(1, 0)[0]);
    }

    rotated.setTo(ZERO_SCALAR);
    Imgproc.warpAffine(enlarged_ROI_container, rotated, img_details.rotation_matrix,
            enlarged_ROI_container.size(), Imgproc.INTER_CUBIC);

    // sort rectangle points in order by first sorting all 4 points based on x
    // we then sort the first two based on y and then the next two based on y
    // this leaves the array in order top-left, bottom-left, top-right, bottom-right
    Collections.sort(img_details.newCornerPoints, CandidateBarcode.get_x_comparator());
    Collections.sort(img_details.newCornerPoints.subList(0, 2), CandidateBarcode.get_y_comparator());
    Collections.sort(img_details.newCornerPoints.subList(2, 4), CandidateBarcode.get_y_comparator());

    // calc height and width of rectangular region
    double height = length(img_details.newCornerPoints.get(1), img_details.newCornerPoints.get(0));
    double width = length(img_details.newCornerPoints.get(2), img_details.newCornerPoints.get(0));

    // create destination points for warpPerspective to map to
    updatePoint(img_details.transformedPoints.get(0), 0, 0);
    updatePoint(img_details.transformedPoints.get(1), 0, height);
    updatePoint(img_details.transformedPoints.get(2), width, 0);
    updatePoint(img_details.transformedPoints.get(3), width, height);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(
            Converters.vector_Point2f_to_Mat(img_details.newCornerPoints),
            Converters.vector_Point2f_to_Mat(img_details.transformedPoints));
    Mat perspectiveOut = Mat.zeros((int) height + 2, (int) width + 2, CvType.CV_32F);
    Imgproc.warpPerspective(rotated, perspectiveOut, perspectiveTransform, perspectiveOut.size(),
            Imgproc.INTER_CUBIC);

    result.ROI = perspectiveOut;
    return result;
}
From source file:logic.featurepointextractor.EyeIrisesFPE.java
@Override
public Point[] detect(MatContainer mc) {
    Mat eyePairMat = mc.grayEyePairMat;
    Rect eyePairRect = mc.eyePairRect;
    Rect faceRect = mc.faceRect;

    // search for eye centers
    Mat circlesMat = new Mat();

    double minDist = 2 * eyePairRect.width / 5;
    int minRad = eyePairRect.height / 5;
    int maxRad = 2 * eyePairRect.height / 3;

    Imgproc.HoughCircles(eyePairMat, circlesMat, Imgproc.CV_HOUGH_GRADIENT, 3.0, minDist, 200.0, 20.0,
            minRad, maxRad);

    float arr1[] = new float[3];
    float arr2[] = new float[3];

    if (circlesMat.size().width == 2) {
        circlesMat.get(0, 0, arr1);
        circlesMat.get(0, 1, arr2);

        float f11 = arr1[0], f12 = arr1[1], f21 = arr2[0], f22 = arr2[1];

        if (Math.abs(f11 - f21) < Parameters.irisXDifferencesThreshold * eyePairRect.width
                && Math.abs(f12 - f22) > Parameters.irisYDifferencesThreshold) {
            // find which eye is left and which is right
            if (f11 < f21) // left-right
                return new Point[] {
                        new Point(f11 + faceRect.x + eyePairRect.x, f12 + faceRect.y + eyePairRect.y),
                        new Point(f21 + faceRect.x + eyePairRect.x, f22 + faceRect.y + eyePairRect.y) };
            else // right-left
                return new Point[] {
                        new Point(f21 + faceRect.x + eyePairRect.x, f22 + faceRect.y + eyePairRect.y),
                        new Point(f11 + faceRect.x + eyePairRect.x, f12 + faceRect.y + eyePairRect.y) };
        }
    }

    LOG.warn("Extract eye iris: FAIL");
    return null;
}
From source file:Main.Camera.CameraController.java
private void TakeShot(Mat I) {
    if (PictureCount <= 6) {
        System.err.println("CURRENT I WIDTH: " + I.width());
        System.err.println("CURRENT I Height: " + I.height());
        System.err.println("CURRENT FRAME WIDTH: " + currentFrame.fitWidthProperty().intValue());
        System.err.println("CURRENT FRAME Height: " + currentFrame.fitHeightProperty().intValue());

        //Mat croppedimage = I; //cropImage(image, rect);
        Mat resizeimage = new Mat();
        Size sz = new Size(150, 150);
        Imgproc.resize(I, resizeimage, sz);

        Mat uncropped = I;
        // note: this only changes the Size object returned by size(); it does not resize the Mat
        uncropped.size().height = 10;
        uncropped.size().width = 30;
        //Mat cropped = fullImage(Rect(0, 0, (I.width() / 2), (I.height() / 2));
        //
        //Print.Say("CURRENT PICTURE");
        //Rect roi = new Rect(FaceRect.xProperty().intValue(), FaceRect.yProperty().intValue(),
        //        FaceRect.widthProperty().intValue(), FaceRect.heightProperty().intValue());
        //
        //Mat cropped = new Mat(uncropped, roi);
        BufferedImage BI = null;

        //Image imageToShow02 = mat2Image(cropped);
        Image imageToShow02 = mat2Image(resizeimage);
        currentPicture.setImage(imageToShow02);
        //Pictures[PictureCount] = matToBufferedImage(cropped, BI);
        Pictures[PictureCount] = matToBufferedImage(resizeimage, BI);
        Print.Say("\nPictures:" + Pictures[PictureCount] + "\n");
        PictureCount++;
    }
    Print.Say("\nSHOT TAKEN\n" + PictureCount);
}
From source file:net.bsrc.cbod.opencv.OpenCV.java
/**
 * Helper method: copies the pixels at the given points from the source image
 * onto an otherwise black image of the same size and type.
 *
 * @param org  the source image
 * @param list the points to keep
 * @return the masked image on a black background
 */
private static Mat getImageWithBlackBg(Mat org, List<Point> list) {
    Mat region = Mat.zeros(org.size(), org.type());

    for (Point p : list) {
        int row = (int) p.y;
        int col = (int) p.x;
        region.put(row, col, org.get(row, col));
    }

    return region;
}
From source file:net.hydex11.opencvinteropexample.MainActivity.java
License:Open Source License
private void example() {
    RenderScript mRS = RenderScript.create(this);

    // Loads input image
    Bitmap inputBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.houseimage);

    // Puts input image inside an OpenCV mat
    Mat inputMat = new Mat();
    Utils.bitmapToMat(inputBitmap, inputMat);

    Mat outputMat = new Mat(inputMat.size(), inputMat.type());

    // Testing bitmap, used to test that the OpenCV mat actually has bitmap data inside
    Bitmap initialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(inputMat, initialBitmap);

    // Retrieve OpenCV mat data addresses
    long inputMatDataAddress = inputMat.dataAddr();
    long outputMatDataAddress = outputMat.dataAddr();

    // Creates an RS type that matches the input mat
    Element element = Element.RGBA_8888(mRS);
    Type.Builder tb = new Type.Builder(mRS, element);
    tb.setX(inputMat.width());
    tb.setY(inputMat.height());
    Type inputMatType = tb.create();

    // Creates RenderScript allocations that use the OpenCV mat data addresses directly
    Allocation inputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType, inputMatDataAddress);
    Allocation outputAllocation = createTypedAllocationWithDataPointer(mRS, inputMatType,
            outputMatDataAddress);

    // Define a simple convolve script
    // Note: here, ANY kernel can be applied!
    ScriptIntrinsicConvolve3x3 convolve3x3 = ScriptIntrinsicConvolve3x3.create(mRS, element);

    float convolveCoefficients[] = new float[9];
    convolveCoefficients[0] = 1;
    convolveCoefficients[2] = 1;
    convolveCoefficients[5] = 1;
    convolveCoefficients[6] = 1;
    convolveCoefficients[8] = 1;
    convolve3x3.setCoefficients(convolveCoefficients);

    convolve3x3.setInput(inputAllocation);
    convolve3x3.forEach(outputAllocation);

    mRS.finish();

    // Converts the result to a bitmap
    Bitmap cvOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(outputMat, cvOutputBitmap);

    // Testing bitmap, used to test the RenderScript output allocation contents
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsOutputBitmap = Bitmap.createBitmap(outputMat.width(), outputMat.height(),
            Bitmap.Config.ARGB_8888);
    outputAllocation.copyTo(rsOutputBitmap);

    // Testing bitmap, used to test that the RenderScript input allocation pointed to the OpenCV mat
    // Note: it is placed here because the copyTo function clears the input buffer
    Bitmap rsInitialBitmap = Bitmap.createBitmap(inputMat.width(), inputMat.height(),
            Bitmap.Config.ARGB_8888);
    inputAllocation.copyTo(rsInitialBitmap);

    // Display input and output
    ImageView originalImageIV = (ImageView) findViewById(R.id.imageView);
    ImageView inputRSImageIV = (ImageView) findViewById(R.id.imageView2);
    ImageView outputRSImageIV = (ImageView) findViewById(R.id.imageView3);
    ImageView outputCVIV = (ImageView) findViewById(R.id.imageView4);

    originalImageIV.setImageBitmap(initialBitmap);
    inputRSImageIV.setImageBitmap(rsInitialBitmap);
    outputRSImageIV.setImageBitmap(rsOutputBitmap);
    outputCVIV.setImageBitmap(cvOutputBitmap);
}
From source file:news_analysis.headlinedetection.HeadLineDetection.java
public boolean isHeadLine(Mat image) {
    Size imageSize = image.size();
    int width = image.width();
    int height = image.height();

    if (height > 50 && height < 100) {
        return horizontalChecked(image, width, height);
    }
    //VerticleChecked(image, width, height);
    return false;
}
From source file:news_analysis.isimage.IsImage.java
public boolean isImage(Mat image) {
    Size imageSize = image.size();
    int width = image.width();
    int height = image.height();

    borderDetection = new BorderDetection();
    ArrayList<BorderItem> borderItems = borderDetection.getBorder(image, width, height);

    Mat[] subMat = new Mat[borderItems.size()];
    for (int i = 0; i < borderItems.size(); i++) {
        BorderItem item = borderItems.get(i);
        if (item.getHeight() > 100 && item.getWidth() > 100) {
            item = canMaxiMizeBorder(item, item.getMinX(), item.getMaxX(), item.getMinY(), item.getMaxY(),
                    height, width);
            subMat[i] = image.submat(item.getMinX(), item.getMaxX(), item.getMinY(), item.getMaxY());
            //NewsAnalysis.imshow("Sub sub sub" + i, subMat[i]);

            int horizontal[] = horizontalChecked(subMat[i], item.getHeight() - 1, item.getWidth() - 1);
            int verticle[] = VerticleChecked(subMat[i], item.getHeight() - 1, item.getWidth() - 1);
            if (horizontal[0] + horizontal[1] > 110 && verticle[0] + verticle[1] > 110) {
                return true;
            }
            return true;
        }
    }
    return false;
}
From source file:news_analysis.NewsAnalysis.java
public static void main(String[] args) throws IOException {
    file = new File("F:\\AbcFile\\filename.txt");
    if (!file.exists()) {
        file.createNewFile();
    }
    fw = new FileWriter(file.getAbsoluteFile());
    bw = new BufferedWriter(fw);
    bw.flush();

    // Load an image file and display it in a window.
    Mat m1 = Highgui.imread("E:\\Raw Java Project\\Thesis\\test6.jpg");
    //imshow("Original", m1);

    // Do some image processing on the image and display it in another window.
    Mat m2 = new Mat();
    Imgproc.bilateralFilter(m1, m2, -1, 50, 10);
    Imgproc.Canny(m2, m2, 10, 200);
    imshow("Edge Detected", m2);

    Size sizeA = m2.size();
    System.out.println("width: " + sizeA.width + " Height: " + sizeA.height);
    int width = (int) sizeA.width;
    int hight = (int) sizeA.height;

    // for each black pixel, measure the length of the black run to its right ([0]) and below it ([1])
    int pointLength[][][] = new int[hight][width][2];
    for (int i = 0; i < hight; i++) {
        for (int j = 0; j < width; j++) {
            double[] data = m2.get(i, j);
            if (m2.get(i, j)[0] != 0) {
                pointLength[i][j][0] = 0;
                pointLength[i][j][1] = 0;
                continue;
            }
            if (j != 0 && m2.get(i, j - 1)[0] == 0) {
                pointLength[i][j][0] = pointLength[i][j - 1][0];
            } else {
                int count = 0;
                for (int k = j + 1; k < width; k++) {
                    if (m2.get(i, k)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][0] = count;
            }
            if (i != 0 && m2.get(i - 1, j)[0] == 0) {
                pointLength[i][j][1] = pointLength[i - 1][j][1];
            } else {
                int count = 0;
                for (int k = i + 1; k < hight; k++) {
                    if (m2.get(k, j)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][1] = count;
            }
            //System.out.println(data[0]);
        }
    }

    String temp = "";
    Mat convertArea = m2.clone();
    int[][] balckWhite = new int[hight][width];
    for (int i = 0; i < hight; i++) {
        temp = "";
        for (int j = 0; j < width; j++) {
            if (i == 0 || j == 0 || i == hight - 1 || j == width - 1) {
                temp = temp + "@";
                balckWhite[i][j] = 1;
                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 150 && pointLength[i][j][1] > 6) {
                temp = temp + "@";
                balckWhite[i][j] = 1;
                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 7 && pointLength[i][j][1] > 200) {
                temp = temp + "@";
                balckWhite[i][j] = 1;
                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else {
                temp = temp + " ";
                balckWhite[i][j] = 0;
                double[] data = m2.get(i, j);
                data[0] = 0.0;
                convertArea.put(i, j, data);
            }
        }
        //filewrile(temp);
    }
    imshow("Conversion", convertArea);

    IsImage isImage = new IsImage();
    HeadLineDetection isHeadline = new HeadLineDetection();
    ImageBorderDetectionBFS imgBFS = new ImageBorderDetectionBFS();
    ArrayList<BorderItem> borderItems = imgBFS.getBorder(balckWhite, width, hight);

    Mat[] subMat = new Mat[borderItems.size()];
    for (int i = 0; i < borderItems.size(); i++) {
        subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
                borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
        if (isImage.isImage(subMat[i])) {
            System.out.println("subMat" + i + " is an image");
            //imshow("subMat" + i, subMat[i]);
        } else if (isHeadline.isHeadLine(subMat[i])) {
            System.out.println("subMat" + i + " is a headline");
            //imshow("Headline" + i, subMat[i]);
        } else {
            System.out.println("subMat" + i + " is a column");
            imshow("Column" + i, subMat[i]);
        }
        //imshow("subMat" + i, subMat[i]);
        bw.close();
    }
}