List of usage examples for org.opencv.core Mat submat
public Mat submat(Rect roi)
From source file:net.bsrc.cbod.opencv.OpenCV.java
/** * @param imagePath name of the orginal image * @param mapFilePath name of the orginal image's map file * @return/* w w w . j av a 2 s . c o m*/ */ public static List<Mat> getSegmentedRegions(String imagePath, String mapFilePath, boolean isBlackBg) { Mat org = getImageMat(imagePath); RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath); List<Mat> result = new ArrayList<Mat>(); Mat map = regionMap.getMap(); for (Integer label : regionMap.getLabels()) { List<Point> points = new ArrayList<Point>(); for (int i = 0; i < map.rows(); i++) { for (int j = 0; j < map.cols(); j++) { double[] temp = map.get(i, j); if (temp[0] == label) { // Warning! col=x=j , row=y=i points.add(new Point(j, i)); } } } Point[] arr = points.toArray(new Point[points.size()]); Rect rect = Imgproc.boundingRect(new MatOfPoint(arr)); Mat region; if (isBlackBg) { region = getImageWithBlackBg(org, points).submat(rect); } else { region = org.submat(rect); } result.add(region); } return result; }
From source file:net.bsrc.cbod.opencv.OpenCV.java
/** * @param imagePath//from w w w. ja v a 2s .c om * @param mapFilePath * @param isBlackBg * @return */ public static List<ImageModel> getSegmentedRegionsAsImageModels(String imagePath, String mapFilePath, boolean isBlackBg) { Mat org = getImageMat(imagePath); RegionMap regionMap = RegionMapFactory.getRegionMap(imagePath, mapFilePath); List<ImageModel> result = new ArrayList<ImageModel>(); Mat map = regionMap.getMap(); for (Integer label : regionMap.getLabels()) { List<Point> points = new ArrayList<Point>(); for (int i = 0; i < map.rows(); i++) { for (int j = 0; j < map.cols(); j++) { double[] temp = map.get(i, j); if (temp[0] == label) { // Warning! col=x=j , row=y=i points.add(new Point(j, i)); } } } Point[] arr = points.toArray(new Point[points.size()]); Rect rect = null; try { rect = Imgproc.boundingRect(new MatOfPoint(arr)); } catch (Exception ex) { logger.error("", ex); continue; } Mat region; if (isBlackBg) { region = getImageWithBlackBg(org, points).submat(rect); } else { region = org.submat(rect); } ImageModel imgModel = new ImageModel(); imgModel.setMat(region); imgModel.setRelativeToOrg(rect); result.add(imgModel); } return result; }
From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java
License:Open Source License
/**
 * Internally sets the region of interest (ROI) to track.
 * Only needs to be set once, unless the region of interest changes.
 *
 * @param frame  The frame to extract the ROI from.
 * @param x      The x coordinate of the ROI (top left).
 * @param y      The y coordinate of the ROI (top left).
 * @param width  The width of the ROI.
 * @param height The height of the ROI.
 */
public void setRegionOfInterest(Bitmap frame, int x, int y, int width, int height) {
    size = new Size(frame.getWidth(), frame.getHeight());
    // Pre-allocate full-frame working Mats; presumably re-filled by toHsv()
    // and later tracking steps — TODO confirm against the rest of the class.
    hsv = new Mat(size, CvType.CV_8UC3);
    hue = new Mat(size, CvType.CV_8UC3);
    mask = new Mat(size, CvType.CV_8UC3);
    hist = new Mat(size, CvType.CV_8UC3);
    bgr = new Mat();
    backproj = new Mat();

    Mat image = new Mat();
    Utils.bitmapToMat(frame, image);

    trackWindow = new Rect(x, y, width, height);

    // Sample the ROI from the raw frame to derive the HSV threshold range
    // used when masking the image.
    Mat bgrRoi = image.submat(trackWindow);
    Pair<Scalar, Scalar> minMaxHsv = getMinMaxHsv(bgrRoi, 2);
    hsvMin = minMaxHsv.first;
    hsvMax = minMaxHsv.second;

    toHsv(image, hsvMin, hsvMax);

    // Build the channel-0 histogram of the ROI (masked); this histogram is the
    // model that later back-projection / CamShift steps match against.
    Mat hsvRoi = hsv.submat(trackWindow);
    Mat maskRoi = mask.submat(trackWindow);

    ArrayList<Mat> hsvRois = new ArrayList<>();
    hsvRois.add(hsvRoi);
    Imgproc.calcHist(hsvRois, new MatOfInt(0), maskRoi, hist, histSize, ranges);
    Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX);
}
From source file:opencv.CaptchaDetection.java
/*** * ??, ROI//from w ww. j a v a2 s .c o m * @param src * @return */ private static List<Mat> find_number(Mat src) { Mat src_tmp = src.clone(); // Imgproc.dilate(src_tmp, src_tmp, new Mat()); // ? Mat canny_edge = new Mat(); Imgproc.blur(src_tmp, src_tmp, new Size(3, 3)); Imgproc.Canny(src_tmp, canny_edge, 50, 150, 3, false); // List<MatOfPoint> contours = new ArrayList<>(); Imgproc.findContours(canny_edge, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); List<Rect> boundRect = new ArrayList<>(); // ??, ?? for (int i = 0; i < contours.size(); i++) { MatOfPoint2f tmp_mp2f_1 = new MatOfPoint2f(); MatOfPoint2f tmp_mp2f_2 = new MatOfPoint2f(); contours.get(i).convertTo(tmp_mp2f_1, CvType.CV_32FC2); Imgproc.approxPolyDP(tmp_mp2f_1, tmp_mp2f_2, 3, true); tmp_mp2f_2.convertTo(contours.get(i), CvType.CV_32S); Rect rect = Imgproc.boundingRect(contours.get(i)); //if (rect.area() > 300) //out.println("h : " + rect.height + ", w : " + rect.width + ", aera : " + rect.area()); if (rect.height >= 21 && rect.width >= 21 && rect.area() >= 700) boundRect.add(rect); } // ?? for (Rect rect : boundRect) { Scalar color = new Scalar(128); Imgproc.rectangle(src_tmp, rect.tl(), rect.br(), color, 2, 8, 0); } // ??? Collections.sort(boundRect, rectSort); List<Mat> numRoi = new ArrayList<>(); for (Rect rect : boundRect) numRoi.add(src.submat(rect)); //for (Mat roi : numRoi) //showResult(roi, "roi"); return numRoi; }
From source file:opencv.CaptchaDetection.java
/*** * ?/*w w w. ja v a 2 s . c om*/ * @param src * @return */ private static String dect_number(List<Mat> src) { String answer = ""; for (Mat numRoi : src) { Mat zoomNum = new Mat(numRoi.rows() * 2, numRoi.cols() * 2, CvType.CV_8UC1, new Scalar(0)); numRoi.copyTo( zoomNum.submat(new Rect(numRoi.cols() / 2, numRoi.rows() / 2, numRoi.cols(), numRoi.rows()))); double matchMin = Double.MAX_VALUE; int matchSample = 0; for (Map.Entry<Integer, List<Mat>> iter : sampleMat.entrySet()) { for (Mat sample : iter.getValue()) { int result_cols = zoomNum.cols() - sample.cols() + 1; int result_rows = zoomNum.rows() - sample.rows() + 1; Mat resultImg = new Mat(result_rows, result_cols, CvType.CV_32FC1); Imgproc.matchTemplate(zoomNum, sample, resultImg, Imgproc.TM_SQDIFF); Core.MinMaxLocResult mmr = Core.minMaxLoc(resultImg); if (matchMin > mmr.minVal) { matchMin = mmr.minVal; matchSample = iter.getKey(); } } } answer += matchSample / 2; //out.println("NumRio\tmatch sample : " + matchSample + "\tmatch value : " + matchMin); } //out.println("Answer is : " + answer); return answer; }
From source file:opencv.fark.FarkBulMainFrame.java
@Override
public void mouseReleased(MouseEvent me) {
    System.out.println("mouse brakld");
    // Only act when a drag selection just finished and no reference image has
    // been captured yet (reference_value == 0).
    if (drag && reference_value == 0) {
        System.out.println("ierdeyim");
        try {
            // Cut the dragged rectangle out of a copy of the current webcam frame.
            Rect r1 = new Rect(first_point, second_point);
            Mat ref = new Mat();
            web_cam.copyTo(ref);
            reference_section = ref.submat(r1);
            // Preview the captured reference region in the UI and persist it to disk.
            MatToBufImg matToBufImage = new MatToBufImg();
            matToBufImage.setMatrix(reference_section, ".jpg");
            g1.drawImage(matToBufImage.getBufferedImage(), 0, 0, null);
            Highgui.imwrite("D:\\refResim.jpg", reference_section);
        } catch (CvException e) {
            // "An OpenCV problem occurred" (Turkish message preserved verbatim).
            System.out.println(" OpenCV Sorun Olutu");
        } catch (Exception ex) {
            // "A Java problem occurred" (Turkish message preserved verbatim).
            System.out.println("Java sorun Olutu");
        }
        // Remember the selection in the shared static points, then clear the
        // live drag points for the next selection.
        staticFirstPoint = first_point;
        staticSecondPoint = second_point;
        first_point = null;
        second_point = null;
    }
    drag = false;
}
From source file:opencv.fark.VideoSecMainFrame.java
@Override
public void mouseReleased(MouseEvent me) {
    // Capture the dragged rectangle as the reference image, but only while no
    // reference has been taken yet (referans_degeri == 0).
    if (drag && referans_degeri == 0) {
        try {
            // NOTE(review): this mutates the SHARED STATIC selection points by one
            // pixel on every mouse release, so repeated releases keep shifting the
            // rectangle. Looks like a one-off inset hack — confirm intent before
            // relying on this method being called more than once.
            if (staticFirstPoint.x < staticSecondPoint.x) {
                staticFirstPoint.x += 1;
                staticFirstPoint.y += 1;
            } else {
                staticSecondPoint.x += 1;
                staticSecondPoint.y += 1;
            }
            // Cut the selection out of a copy of the current video frame and
            // persist it for later comparison.
            Rect r1 = new Rect(staticFirstPoint, staticSecondPoint);
            Mat ref = new Mat();
            frame.copyTo(ref);
            reference_section = ref.submat(r1);
            Highgui.imwrite("D:\\testResim\\refResim.png", reference_section);
        } catch (CvException e) {
            // "An OpenCV problem occurred" (Turkish message preserved verbatim).
            System.out.println(" OpenCV Sorun Olutu");
        } catch (Exception ex) {
            // "A Java problem occurred" (Turkish message preserved verbatim).
            System.out.println("Java sorun Olutu");
        }
    } // first if end
    drag = false;
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.detect.DetectStripTask.java
License:Open Source License
/**
 * Background pipeline: for each patch image captured for this strip test,
 * convert the raw camera bytes to Lab, perspective-warp, calibrate colors,
 * cut out and detect the test strip, and write the resulting Lab pixels
 * (plus row/col dimensions) to internal storage for the result activity.
 * Individual per-image failures are logged and the image is skipped;
 * calibration failure aborts the whole task.
 */
@Nullable
@Override
protected Void doInBackground(Intent... params) {
    Intent intent = params[0];
    if (intent == null) {
        return null;
    }

    String uuid = intent.getStringExtra(Constant.UUID);

    StripTest stripTest = new StripTest();
    int numPatches = stripTest.getPatchCount(uuid);

    format = intent.getIntExtra(Constant.FORMAT, ImageFormat.NV21);
    width = intent.getIntExtra(Constant.WIDTH, 0);
    height = intent.getIntExtra(Constant.HEIGHT, 0);

    if (width == 0 || height == 0) {
        return null;
    }

    JSONArray imagePatchArray = null;
    int imageCount = -1;
    Mat labImg; // Mat for image from NV21 data
    Mat labStrip; // Mat for detected strip

    try {
        // The patch layout was written to internal storage earlier in the flow.
        String json = FileUtil.readFromInternalStorage(context, Constant.IMAGE_PATCH);
        imagePatchArray = new JSONArray(json);
    } catch (Exception e) {
        Timber.e(e);
    }

    for (int i = 0; i < numPatches; i++) {
        try {
            if (imagePatchArray != null) {
                // sub-array for each patch
                JSONArray array = imagePatchArray.getJSONArray(i);

                // get the image number from the json array
                int imageNo = array.getInt(0);

                // Several patches can share one image; only process each image once.
                if (imageNo > imageCount) {

                    // Set imageCount to current number
                    imageCount = imageNo;

                    byte[] data = FileUtil.readByteArray(context, Constant.DATA + imageNo);
                    if (data == null) {
                        throw new IOException();
                    }

                    // make a L,A,B Mat object from data
                    try {
                        labImg = makeLab(data);
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    // perspectiveTransform
                    try {
                        if (labImg != null) {
                            warp(labImg, imageNo);
                        }
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    // divide into calibration and strip areas
                    try {
                        if (context != null) {
                            divideIntoCalibrationAndStripArea();
                        }
                    } catch (Exception e) {
                        Timber.e(e);
                        continue;
                    }

                    // calibrate; a calibration failure aborts the whole task.
                    Mat calibrationMat;
                    try {
                        CalibrationResultData calResult = getCalibratedImage(warpMat);
                        if (calResult == null) {
                            return null;
                        } else {
                            calibrationMat = calResult.getCalibratedImage();
                        }
                    } catch (Exception e) {
                        Timber.e(e);
                        return null;
                    }

                    // cut out black area that contains the strip
                    Mat stripArea = null;
                    if (roiStripArea != null) {
                        stripArea = calibrationMat.submat(roiStripArea);
                    }

                    if (stripArea != null) {
                        Mat strip = null;
                        try {
                            StripTest.Brand brand = stripTest.getBrand(uuid);
                            strip = OpenCVUtil.detectStrip(stripArea, brand, ratioW, ratioH);
                        } catch (Exception e) {
                            Timber.e(e);
                        }

                        String error = "";
                        if (strip != null) {
                            labStrip = strip.clone();
                        } else {
                            // Strip detection failed: fall back to the whole strip
                            // area, tag the stored file with ERROR, and draw a red
                            // cross over the image so the failure is visible.
                            if (context != null) {
                                Timber.e(context.getString(R.string.error_calibrating));
                            }
                            labStrip = stripArea.clone();
                            error = Constant.ERROR;

                            Scalar red = RED_LAB_COLOR; // Lab color
                            Imgproc.line(labStrip, new Point(0, 0),
                                    new Point(labStrip.cols(), labStrip.rows()), red, 2);
                            Imgproc.line(labStrip, new Point(0, labStrip.rows()),
                                    new Point(labStrip.cols(), 0), red, 2);
                        }

                        try {
                            // create byte[] from Mat and store it in internal storage.
                            // In order to restore the byte array, we also need the rows
                            // and columns dimensions; these are stored in the last 8 bytes
                            // (little-endian rows then cols, appended after the pixels).
                            int dataSize = labStrip.cols() * labStrip.rows() * 3;
                            byte[] payload = new byte[dataSize + 8];
                            byte[] matByteArray = new byte[dataSize];
                            labStrip.get(0, 0, matByteArray);

                            // pack cols and rows into byte arrays
                            byte[] rows = FileUtil.leIntToByteArray(labStrip.rows());
                            byte[] cols = FileUtil.leIntToByteArray(labStrip.cols());

                            // append them to the end of the array, in order rows, cols
                            System.arraycopy(matByteArray, 0, payload, 0, dataSize);
                            System.arraycopy(rows, 0, payload, dataSize, 4);
                            System.arraycopy(cols, 0, payload, dataSize + 4, 4);

                            FileUtil.writeByteArray(context, payload, Constant.STRIP + imageNo + error);
                        } catch (Exception e) {
                            Timber.e(e);
                        }
                    }
                }
            }
        } catch (@NonNull JSONException | IOException e) {
            if (context != null) {
                Timber.e(context.getString(R.string.error_cut_out_strip));
            }
        }
    }
    return null;
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.OpenCVUtil.java
License:Open Source License
/**
 * Detects and straightens the test strip inside the (Lab) strip area.
 * Steps: binarise channel 0, fit a line through the strip to find its tilt,
 * rotate the image to horizontal, locate the strip's top/bottom edges from
 * per-row white counts, then delegate the left/right bounds to
 * {@code getStripRectangle}. Releases all intermediate Mats, including the
 * caller-supplied {@code stripArea}.
 *
 * @param stripArea image region assumed to contain the strip
 * @param brand     strip brand (supplies the physical strip length)
 * @param ratioW    horizontal pixels-per-mm ratio
 * @param ratioH    vertical ratio (currently unused, hence the suppression)
 * @return a clone of the detected, straightened color strip
 */
@SuppressWarnings("UnusedParameters")
public static Mat detectStrip(Mat stripArea, StripTest.Brand brand, double ratioW, double ratioH) {
    List<Mat> channels = new ArrayList<>();
    Mat sArea = stripArea.clone();

    // median blur (the original comment said "Gaussian", but medianBlur is used)
    Imgproc.medianBlur(sArea, sArea, 3);
    Core.split(sArea, channels);

    // create binary image from channel 0
    Mat binary = new Mat();

    // determine min and max NOT USED
    Imgproc.threshold(channels.get(0), binary, 128, MAX_RGB_INT_VALUE, Imgproc.THRESH_BINARY);

    // compute first approximation of line through length of the strip:
    // for every column, record the mean row index of its white pixels.
    final WeightedObservedPoints points = new WeightedObservedPoints();
    final WeightedObservedPoints corrPoints = new WeightedObservedPoints();

    double tot, yTot;
    for (int i = 0; i < binary.cols(); i++) { // iterate over cols
        tot = 0;
        yTot = 0;
        for (int j = 0; j < binary.rows(); j++) { // iterate over rows
            if (binary.get(j, i)[0] > 128) {
                yTot += j;
                tot++;
            }
        }
        if (tot > 0) {
            points.add((double) i, yTot / tot);
        }
    }

    // order of coefficients is (b + ax), so [b, a]
    final PolynomialCurveFitter fitter = PolynomialCurveFitter.create(1);
    List<WeightedObservedPoint> pointsList = points.toList();
    final double[] coefficient = fitter.fit(pointsList);

    // second pass, remove outliers from the fit
    double estimate, actual;
    for (int i = 0; i < pointsList.size(); i++) {
        estimate = coefficient[1] * pointsList.get(i).getX() + coefficient[0];
        actual = pointsList.get(i).getY();
        if (actual > LOWER_PERCENTAGE_BOUND * estimate && actual < UPPER_PERCENTAGE_BOUND * estimate) {
            // if the point differs less than +/- 10%, keep the point
            corrPoints.add(pointsList.get(i).getX(), pointsList.get(i).getY());
        }
    }

    final double[] coefficientCorr = fitter.fit(corrPoints.toList());
    double slope = coefficientCorr[1];
    double offset = coefficientCorr[0];

    // compute rotation angle from the fitted slope
    double rotAngleDeg = Math.atan(slope) * 180 / Math.PI;

    // determine a point on the line, in the middle of strip,
    // in the horizontal middle of the whole image
    int midPointX = binary.cols() / 2;
    int midPointY = (int) Math.round(midPointX * slope + offset);

    // rotate around the midpoint, to straighten the binary strip
    Mat dstBinary = new Mat(binary.rows(), binary.cols(), binary.type());
    Point center = new Point(midPointX, midPointY);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, rotAngleDeg, 1.0);
    Imgproc.warpAffine(binary, dstBinary, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // also apply the same rotation to the colored strip
    Mat dstStrip = new Mat(stripArea.rows(), stripArea.cols(), stripArea.type());
    Imgproc.warpAffine(stripArea, dstStrip, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // Compute white points in each row
    double[] rowCount = new double[dstBinary.rows()];
    int rowTot;
    for (int i = 0; i < dstBinary.rows(); i++) { // iterate over rows
        rowTot = 0;
        for (int j = 0; j < dstBinary.cols(); j++) { // iterate over cols
            if (dstBinary.get(i, j)[0] > 128) {
                rowTot++;
            }
        }
        rowCount[i] = rowTot;
    }

    // find width by finding rising and dropping edges
    // rising edge = largest positive difference
    // falling edge = largest negative difference
    int risePos = 0;
    int fallPos = 0;
    double riseVal = 0;
    double fallVal = 0;
    for (int i = 0; i < dstBinary.rows() - 1; i++) {
        if (rowCount[i + 1] - rowCount[i] > riseVal) {
            riseVal = rowCount[i + 1] - rowCount[i];
            risePos = i + 1;
        }
        if (rowCount[i + 1] - rowCount[i] < fallVal) {
            fallVal = rowCount[i + 1] - rowCount[i];
            fallPos = i;
        }
    }

    // cut out binary strip between the top (rise) and bottom (fall) edges
    Point stripTopLeft = new Point(0, risePos);
    Point stripBottomRight = new Point(dstBinary.cols(), fallPos);
    org.opencv.core.Rect stripAreaRect = new org.opencv.core.Rect(stripTopLeft, stripBottomRight);
    Mat binaryStrip = dstBinary.submat(stripAreaRect);

    // also cut out colored strip
    Mat colorStrip = dstStrip.submat(stripAreaRect);

    // now right end of strip
    // method: first rising edge — count white pixels per column
    double[] colCount = new double[binaryStrip.cols()];
    int colTotal;
    for (int i = 0; i < binaryStrip.cols(); i++) { // iterate over cols
        colTotal = 0;
        for (int j = 0; j < binaryStrip.rows(); j++) { // iterate over rows
            if (binaryStrip.get(j, i)[0] > 128) {
                colTotal++;
            }
        }
        colCount[i] = colTotal;
    }

    stripAreaRect = getStripRectangle(binaryStrip, colCount, brand.getStripLength(), ratioW);

    // clone before releasing colorStrip, so the result owns its pixels
    Mat resultStrip = colorStrip.submat(stripAreaRect).clone();

    // release Mat objects (including the caller's stripArea)
    stripArea.release();
    sArea.release();
    binary.release();
    dstBinary.release();
    dstStrip.release();
    binaryStrip.release();
    colorStrip.release();

    return resultStrip;
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java
License:Open Source License
@NonNull public static Mat concatenate(@NonNull Mat m1, @NonNull Mat m2) { int width = Math.max(m1.cols(), m2.cols()); int height = m1.rows() + m2.rows(); Mat result = new Mat(height, width, CvType.CV_8UC3, new Scalar(MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE)); // rect works with x, y, width, height Rect roi1 = new Rect(0, 0, m1.cols(), m1.rows()); Mat roiMat1 = result.submat(roi1); m1.copyTo(roiMat1);/*from ww w . ja va2 s. c o m*/ Rect roi2 = new Rect(0, m1.rows(), m2.cols(), m2.rows()); Mat roiMat2 = result.submat(roi2); m2.copyTo(roiMat2); return result; }