Usage examples for `org.opencv.core.Mat.cols()` — signature: `public int cols()`.
From source file:cmib_4_4.NoiseRemove.java
/**
 * Removes isolated foreground pixels from a binary (grey) mask, then multiplies the
 * RGB image by the cleaned mask to obtain the final masked colour image.
 *
 * Pass 1 walks the mask in row-major order; each white pixel (byte value -1, i.e. 255
 * unsigned) is first rewritten in place to 1, then kept only if some run of 3
 * consecutive neighbours (in a fixed clockwise ordering) is still -1, otherwise it is
 * zeroed. NOTE(review): because the -1 -> 1 rewrite happens during the same scan,
 * neighbours already visited can never match -1 — the result depends on scan order.
 * Pass 2 multiplies each RGB pixel by the mask value (0 or 1).
 *
 * @param noiseGreyImage binary mask, modified in place
 * @param rbgImage       colour image, modified in place and returned
 * @return the masked colour image (same object as {@code rbgImage})
 */
public Mat removeNoisePixcels(Mat noiseGreyImage, Mat rbgImage) {
    Mat image = noiseGreyImage;
    Mat image1 = rbgImage;
    int size = (int) (image.total() * image.channels());
    byte[] get = new byte[size];
    byte[] temp1 = new byte[size];
    int size1 = (int) (image1.total() * image1.channels());
    byte[] rgb1 = new byte[size1];
    // Pass 1: normalise white to 1 and drop pixels without 3 consecutive white neighbours.
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, get);
            if (get[0] == -1) { // -1 == 255 unsigned: white
                image.put(i, j, 1);
            }
            image.get(i, j, get);
            //System.out.println(get[0]);
            if (get[0] == 1) {
                // NOTE(review): `&` / `|` are non-short-circuit boolean ops here; behaviour
                // is the same as && / || for these side-effect-free operands.
                if (i == 0 & j == 0) {
                    // Top-left corner: 3 in-bounds neighbours.
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1);
                    image.get(i + 1, j + 1, a2);
                    image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 0.0);
                        //System.out.println("1");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0 & j == image.cols()) {
                    // NOTE(review): j ranges up to cols()-1, so `j == image.cols()` is never
                    // true — this and the similar branches below look like off-by-one bugs
                    // (probably meant cols()-1 / rows()-1); the interior `else` branch then
                    // reads j+1 / i+1 at the border. TODO confirm intended behaviour.
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    image.get(i, j - 1, a1);
                    image.get(i + 1, j - 1, a2);
                    image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("2");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows() & j == image.cols()) {
                    // Bottom-right corner (unreachable — see note above).
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    image.get(i - 1, j, a1);
                    image.get(i - 1, j - 1, a2);
                    image.get(i, j - 1, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("3");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0 & i == image.rows()) {
                    // Bottom-left corner (unreachable — see note above).
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1);
                    image.get(i - 1, j + 1, a2);
                    image.get(i - 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("4");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0) {
                    // Left edge: 5 neighbours, 3 overlapping windows of 3.
                    // NOTE(review): this branch alone uses double[] reads (values 0..255),
                    // so `== -1` can only match via float equality — likely never true;
                    // the byte[] branches elsewhere compare the signed byte -1. TODO confirm.
                    double[] a1 = image.get(i - 1, j);
                    double[] a2 = image.get(i - 1, j + 1);
                    double[] a3 = image.get(i, j + 1);
                    double[] a4 = image.get(i + 1, j + 1);
                    double[] a5 = image.get(i + 1, j);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)
                            | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("5");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0) {
                    // Top edge: 5 neighbours, 3 overlapping windows of 3.
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1];
                    image.get(i, j - 1, a1);
                    image.get(i + 1, j - 1, a2);
                    image.get(i + 1, j, a3);
                    image.get(i + 1, j + 1, a4);
                    image.get(i, j + 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)
                            | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("6");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == image.cols()) {
                    // Right edge (unreachable — see note above).
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1];
                    image.get(i - 1, j, a1);
                    image.get(i - 1, j - 1, a2);
                    image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4);
                    image.get(i + 1, j, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)
                            | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("7");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows()) {
                    // Bottom edge (unreachable — see note above).
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1];
                    image.get(i, j + 1, a1);
                    image.get(i - 1, j + 1, a2);
                    image.get(i - 1, j, a3);
                    image.get(i - 1, j - 1, a4);
                    image.get(i, j - 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)
                            | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("8");
                    } else {
                        image.put(i, j, 0);
                    }
                } else {
                    // Interior pixel: 8 neighbours, 6 overlapping windows of 3.
                    byte[] a1 = new byte[1];
                    byte[] a2 = new byte[1];
                    byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1];
                    byte[] a6 = new byte[1];
                    byte[] a7 = new byte[1];
                    byte[] a8 = new byte[1];
                    image.get(i - 1, j, a1);
                    image.get(i - 1, j - 1, a2);
                    image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4);
                    image.get(i + 1, j, a5);
                    image.get(i + 1, j + 1, a6);
                    image.get(i, j + 1, a7);
                    image.get(i - 1, j + 1, a8);
                    // NOTE(review): `a3[0] == 1` and `a6[0] == 1` break the otherwise uniform
                    // `== -1` pattern — almost certainly typos for -1. TODO confirm.
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)
                            | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == 1 & a4[0] == -1 & a5[0] == -1)
                            | (a4[0] == -1 & a5[0] == -1 & a6[0] == -1)
                            | (a5[0] == -1 & a6[0] == 1 & a7[0] == -1)
                            | (a6[0] == -1 & a7[0] == -1 & a8[0] == -1)) {
                        //image1.put(j, i, 1.0);
                        //System.out.println("9");
                    } else {
                        image.put(i, j, 0);
                    }
                }
            }
        }
    }
    ////////////////////// Obtain RGB final blood cell image /////////////////////////////
    // Pass 2: multiply each RGB channel by the 0/1 mask value.
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, temp1);
            image1.get(i, j, rgb1);
            if (temp1[0] == -1) {
                image.put(i, j, 1);
            }
            image.get(i, j, temp1);
            byte r = (byte) (rgb1[0] * temp1[0]);
            byte g = (byte) (rgb1[1] * temp1[0]);
            byte b = (byte) (rgb1[2] * temp1[0]);
            image1.put(i, j, new byte[] { r, g, b });
        }
    }
    return image1;
}
From source file:cn.xiongyihui.webcam.JpegFactory.java
License:Open Source License
/**
 * Camera preview callback: matches ORB features of the incoming NV21 frame against a
 * stored template, estimates the tracked object's centroid and an RMS "radius" from a
 * coarse feature-density kernel, optionally draws a bounding circle, and publishes the
 * frame as JPEG bytes to {@code mJpegOutputStream} / {@code mJpegData}, waking any
 * thread waiting on the stream.
 *
 * Side effects: mutates many values on {@code globalClass} (hamming distance, flags,
 * call count, kernel size, averages), and the instance fields {@code mJpegOutputStream}
 * and {@code mJpegData}. All errors are caught and logged, never rethrown.
 *
 * @param data   raw NV21 preview frame of size mWidth x mHeight
 * @param camera the source camera (unused)
 */
public void onPreviewFrame(byte[] data, Camera camera) {
    // NOTE(review): yuvImage is never used — the frame is decoded again via mYuv below.
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
    mJpegOutputStream.reset();
    try {
        // --- Read template state captured earlier by the UI ---
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);
        // Scale factor from captured-template coordinates to preview coordinates
        // (mean of the width and height ratios to absorb minor aspect differences).
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;
        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; //Just to account for any minor variations.
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");
        // --- One-time rescale of template keypoints into preview coordinates (flag guards it) ---
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;
            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }
            this.globalClass.setFlag(1);
        }
        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");
        // NOTE(review): the (int) cast truncates the millisecond clock — timing can go
        // wrong when the low 32 bits wrap; same below for timeEnd.
        long timeBegin = (int) System.currentTimeMillis();
        // --- Decode NV21 frame: YUV -> RGB -> grayscale ---
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);
        // --- Detect ORB features and compute descriptors on the grayscale frame ---
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();
        Scalar color = new Scalar(0, 0, 0);
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);
        // BRUTEFORCE_HAMMING apparently finds even the suspicious feature-points!
        // So, inliers and outliers can turn out to be a problem.
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);
        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));
        DMatch[] matchesArray = matches.toArray();
        double minimumMatchDistance = globalClass.getHammingDistance();
        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;
        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
        Point point;
        // Colour gates for accepting a matched feature.
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();
        double[] colorValue;
        double red, green, blue;
        int[] featureCount;
        // Coarse 9x9 occupancy grid over the frame; a cell is "on" if any accepted
        // feature landed in it this frame.
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;
        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;
        // --- Accumulate response-weighted centroid over colour-gated good matches ---
        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                //MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);
                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];
                int xKernelFeature, yKernelFeature;
                //Color feedback
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue)
                        & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                        // NOTE(review): exception silently swallowed — looks like a guard
                        // against out-of-range kernel indices; confirm and narrow if possible.
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }
        // Binarise the per-cell counters into this frame's kernel.
        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;
        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            // No accepted features: relax the match threshold for the next frame.
            //Log.e(TAG, "Centroid is not getting detected! Increasing hamming distance (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            // Tracking succeeded: tighten the match threshold again.
            //Log.e(TAG, "Centroid is getting detected! Decreasing and optimising hamming (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);
            // Enough history accumulated: enable display of the averaged bounding circle.
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }
            // Temporal sum of recent kernels, thresholded into a stable mask.
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);
            Log.e(TAG, Arrays.deepToString(kernelSum));
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) { //Meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }
            Log.e(TAG, Arrays.deepToString(mask));
            // Mean cell position of the mask.
            // NOTE(review): maskedFeatureCount starts at 1 (not 0) — presumably to avoid
            // divide-by-zero, but it biases the mean; confirm intent.
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }
            // NOTE(review): integer division before the double assignment truncates.
            double xMaskMean = xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = yMaskFeatureSum / maskedFeatureCount;
            // RMS spread of the mask around its mean, mapped back to pixel units.
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }
            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);
            /*//Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
            if(yImageCentroid>yTemplateCentroid) { throttlePWM = throttlePWM + deltaThrottle; }else{ throttlePWM = throttlePWM - deltaThrottle; }
            if(RMSimage>distanceTemplateFeatures) { pitchPWM = pitchPWM + deltaPitch; }else{ pitchPWM = pitchPWM - deltaPitch; }
            if(xImageCentroid>xTemplateCentroid) { yawPWM = yawPWM + deltaYaw; }else{ yawPWM = yawPWM - deltaYaw; }
            if(1000>throttlePWM){ throttlePWM = 1000; }
            if(2000<throttlePWM){ throttlePWM = 2000; }
            if(1000>pitchPWM){ pitchPWM = 1000; }
            if(2000<pitchPWM){ pitchPWM = 2000; }
            if(1000>yawPWM){ yawPWM = 1000; }
            if(2000<yawPWM){ yawPWM = 2000; }
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM);*/
            //Display bounding circle
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;
            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);
            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            // 1.4826: consistency factor relating MAD to standard deviation — presumably
            // used to normalise the RMS estimate; confirm against the author's derivation.
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }
            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }
            if (initialisationFlag == 1) {
                //int displayRadius = 50;
                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                // NOTE(review): centroidAverage is called a second time here for the same
                // frame — confirm this double-count is intended.
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }
        }
        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*try {
            //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
            Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        }catch(Exception e){}*/
        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");
        //Log.e(TAG, Arrays.toString(matchesArray));
        // --- Encode the annotated grayscale frame as JPEG and publish it ---
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }
        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + "milli-seconds!");
        mJpegData = mJpegOutputStream.toByteArray();
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll(); // wake consumers waiting for the next frame
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }
}
From source file:com.astrocytes.core.ImageHelper.java
License:Open Source License
public static BufferedImage convertMatToBufferedImage(Mat in) { BufferedImage out;/* w ww . java 2 s .c o m*/ byte[] data = new byte[in.cols() * in.rows() * (int) in.elemSize()]; in.get(0, 0, data); int type = BufferedImage.TYPE_3BYTE_BGR; switch (in.channels()) { case 1: type = BufferedImage.TYPE_BYTE_GRAY; break; case 3: type = BufferedImage.TYPE_3BYTE_BGR; // bgr to rgb byte b; for (int i = 0; i < data.length; i = i + 3) { b = data[i]; data[i] = data[i + 2]; data[i + 2] = b; } break; } out = new BufferedImage(in.cols(), in.rows(), type); out.getRaster().setDataElements(0, 0, in.cols(), in.rows(), data); return out; }
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/** * Converts a source color image to a gray image. * * @param src - BGR image.// w w w . ja v a2 s .co m * @return gray image. */ public static Mat grayscale(Mat src) { if (src.channels() < 3) return src; Mat dest = new Mat(src.rows(), src.cols(), CvType.CV_8UC1); cvtColor(src, dest, COLOR_BGR2GRAY); return dest; }
From source file:com.astrocytes.core.operationsengine.CoreOperations.java
License:Open Source License
/** * Remove all small contours on binary image with areas less than specified threshold. * * @param src - binary source image.//from w ww . j a va2 s.com * @param thresh - minimum area of contour. * @return a source image with removed all contours with area less than {@param thresh}. */ public static Mat clearContours(Mat src, int thresh) { if (src.channels() > 1) return src; Mat dest = src.clone(); List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); Mat hierarchy = new Mat(); findContours(src, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_TC89_L1); Mat maskWhite = new Mat(src.rows(), src.cols(), CvType.CV_8UC1, new Scalar(255)); Mat maskBlack = maskWhite.clone(); for (int i = 0; i < contours.size(); i++) { Double contourArea = contourArea(contours.get(i)); if (contourArea < thresh) { int pixelColor = averageIntensity(src, contours.get(i)); drawContours(pixelColor > 127 ? maskWhite : maskBlack, contours, i, new Scalar(0), Core.FILLED); } } maskWhite = erode(maskWhite, 2); maskBlack = erode(maskBlack, 2); dest = and(maskWhite, dest); dest = or(invert(maskBlack), dest); return dest; }
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
@Override public Mat applyCannyEdgeDetection(Integer minThreshold, Integer maxThreshold, Boolean useImage) { CoreOperations.cannyFilter(currentImage, minThreshold, maxThreshold).copyTo(currentImage); if (useImage != null && useImage) { Mat result = sourceImage.clone(); for (int col = 0; col < result.cols(); col++) { for (int row = 0; row < result.rows(); row++) { if (currentImage.get(row, col)[0] != 0) { result.put(row, col, 255.0, 0.0, 0.0); }//from w w w.ja v a 2s.com } } result.copyTo(currentImage); } return currentImage; }
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
/**
 * Detects horizontal tissue-layer boundaries in {@code sourceImage} and stores them in
 * the {@code layerBounds} field as a 6-row CV_32F matrix: row 0 is the smoothed upper
 * bound, row 5 the smoothed lower bound, rows 1-4 are interpolated via
 * {@code BRODMANN_COEFFS}.
 *
 * Pipeline: equalize+invert the source, build a horizontally box-averaged intensity
 * map ("density"), normalise each column to sum to 1, threshold against the uniform
 * average to find exact per-column upper/lower bounds, then smooth both bound curves
 * with a wide moving average.
 */
private void detectLayers() {
    Mat equalizedImage = CoreOperations.invert(CoreOperations.equalize(sourceImage));
    int halfColumnWidth = 50; // horizontal box-filter half-width, in pixels
    Mat density = new Mat(equalizedImage.rows(), equalizedImage.cols(), CvType.CV_32F);
    int rows = density.rows();
    int cols = density.cols();
    // Horizontal box average per pixel (author notes: > 1 min — O(rows*cols*window)
    // with per-pixel JNI get() calls; a prime candidate for bulk row reads).
    for (int i = 0; i < rows; i++) {
        double p;
        int leftBoundInterval, rightBoundInterval, intervalLength;
        for (int j = 0; j < cols; j++) {
            p = 0.0;
            leftBoundInterval = Math.max(j - halfColumnWidth, 0);
            rightBoundInterval = Math.min(cols - 1, j + halfColumnWidth);
            intervalLength = rightBoundInterval - leftBoundInterval + 1;
            for (int s = leftBoundInterval; s <= rightBoundInterval; s++) {
                p += equalizedImage.get(i, s)[0];
            }
            density.put(i, j, p / intervalLength);
        }
    }
    // Normalise each column so its density sums to 1 (author notes: ~3 seconds).
    for (int j = 0; j < cols; j++) {
        double intensity = 0.0;
        for (int i = 0; i < rows; i++) {
            intensity += density.get(i, j)[0];
        }
        for (int i = 0; i < rows; i++) {
            density.put(i, j, density.get(i, j)[0] / intensity);
        }
    }
    // With each column summing to 1, a uniform column would have value 1/rows everywhere.
    double ndlAverage = 1.0 / (double) rows;
    layerBounds = new Mat(6, cols, CvType.CV_32F);
    // Empirical density offsets for the upper (k1) and lower (k2) boundary tests —
    // presumably tuned on the target image set; TODO confirm provenance.
    double k1 = 0.56E-4;
    double k2 = 1.3E-4;
    /*float[] data = new float[density.rows() * (int) density.elemSize()];
    density.get(0, 10, data);*/
    Mat upperBoundExact = new Mat(1, cols, CvType.CV_32F);
    Mat lowerBoundExact = new Mat(1, cols, CvType.CV_32F);
    // Per column: first row from the top / bottom whose density exceeds the threshold.
    for (int j = 0; j < cols; j++) {
        int upperBound = 0;
        int lowerBound = 0;
        for (int i = 0; i < rows; i++) {
            if (density.get(i, j)[0] > ndlAverage + k1) {
                upperBound = i;
                break;
            }
        }
        for (int i = rows - 1; i >= 0; i--) {
            if (density.get(i, j)[0] > ndlAverage + k2) {
                lowerBound = i;
                break;
            }
        }
        upperBoundExact.put(0, j, upperBound);
        lowerBoundExact.put(0, j, lowerBound);
    }
    // Moving average (window half-width 300 columns) smooths the exact bound curves,
    // then the 4 intermediate layer rows are placed by the Brodmann proportions.
    int movingAverage = 300;
    for (int i = 0; i < upperBoundExact.cols(); i++) {
        int leftBoundInterval = Math.max(i - movingAverage, 0);
        int rightBoundInterval = Math.min(cols - 1, i + movingAverage);
        int intervalLength = rightBoundInterval - leftBoundInterval + 1;
        int upperBoundAverage = 0;
        int lowerBoundAverage = 0;
        for (int j = leftBoundInterval; j <= rightBoundInterval; j++) {
            upperBoundAverage += upperBoundExact.get(0, j)[0];
            lowerBoundAverage += lowerBoundExact.get(0, j)[0];
        }
        upperBoundAverage /= intervalLength;
        lowerBoundAverage /= intervalLength;
        int columnHeight = lowerBoundAverage - upperBoundAverage;
        layerBounds.put(0, i, upperBoundAverage);
        for (int h = 1; h < 5; h++) {
            layerBounds.put(h, i, upperBoundAverage + BRODMANN_COEFFS[h - 1] * columnHeight);
        }
        layerBounds.put(5, i, lowerBoundAverage);
    }
}
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
private Mat applyRayCastingSegmentation() { //Mat cannyEdges = CoreOperations.cannyFilter(sourceImage, 26, 58); Mat contours = new Mat(preparedImage.rows(), preparedImage.cols(), CvType.CV_32S); int contoursCount = /*neurons.size();*/ CoreOperations .drawAllContours(CoreOperations.erode(preparedImage, 5), contours); Mat result = new Mat(preparedImage.rows(), preparedImage.cols(), preparedImage.type());//CoreOperations.or(CoreOperations.and(cannyEdges, CoreOperations.grayscale(preparedImage)), contours); //cannyEdges.release(); //Mat markers = new Mat(contours.rows(), contours.cols(), CvType.CV_32S); //contours.copyTo(markers); contours.convertTo(contours, CvType.CV_32S); for (Neuron neuron : neurons) { int x = (int) neuron.getCenter().x; int y = (int) neuron.getCenter().y; int color = (int) preparedImage.get(y, x)[0]; /*contours.put(y, x, color); contours.put(y - 2, x, color);//from w w w. j av a2 s. c om contours.put(y + 2, x, color); contours.put(y, x - 2, color); contours.put(y, x + 2, color);*/ Imgproc.circle(contours, neuron.getCenter(), (int) (0.4f * neuron.getRadius()), new Scalar(color), -1); } Imgproc.watershed(sourceImage, contours); for (int i = 0; i < contours.rows(); i++) { for (int j = 0; j < contours.cols(); j++) { int index = (int) contours.get(i, j)[0]; if (index == -1) { result.put(i, j, 0, 0, 0); } else if (index <= 0 || index > contoursCount) { result.put(i, j, 0, 0, 0); } else { if (index == 255) { result.put(i, j, 0, 0, 0/*sourceImage.get(i, j)*/); } else { result.put(i, j, index, index, index); } } } } result = CoreOperations.erode(result, 2); result = CoreOperations.dilate(result, 3); contours.release(); contours = sourceImage.clone(); CoreOperations.drawAllContours(result, contours); return contours; }
From source file:com.astrocytes.core.operationsengine.OperationsImpl.java
License:Open Source License
private List<Mat> showClusters(Mat cutout, Mat labels, Mat centers) { centers.convertTo(centers, CvType.CV_8UC1, 255.0); centers.reshape(3);// w ww. java2s .c o m List<Mat> clusters = new ArrayList<Mat>(); for (int i = 0; i < centers.rows(); i++) { clusters.add(Mat.zeros(cutout.size(), cutout.type())); } Map<Integer, Integer> counts = new HashMap<Integer, Integer>(); for (int i = 0; i < centers.rows(); i++) { counts.put(i, 0); } for (int y = 0; y < cutout.rows(); y++) { int rows = 0; for (int x = 0; x < cutout.cols(); x++) { int label = (int) labels.get(rows, 0)[0]; int r = (int) centers.get(label, 2)[0]; int g = (int) centers.get(label, 1)[0]; int b = (int) centers.get(label, 0)[0]; counts.put(label, counts.get(label) + 1); clusters.get(label).put(y, x, b, g, r); rows++; } } System.out.println(counts); return clusters; }
From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java
License:Open Source License
public static Bitmap GetBitmapFromMat(Mat mat, boolean convertColor) { Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888); if (convertColor) { Mat finalColourMat = new Mat(); Imgproc.cvtColor(mat, finalColourMat, Imgproc.COLOR_BGR2RGB); matToBitmap(finalColourMat, bitmap); } else {/*from w ww . ja v a 2s .c o m*/ matToBitmap(mat, bitmap); } return bitmap; }