List of usage examples for org.opencv.core Mat put
public int put(int row, int col, byte[] data)
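All of the examples below push pixel or matrix data into a Mat through this overload (or one of its double[] siblings). As a minimal, hedged sketch of the call itself, with illustrative values only: put(row, col, data) copies the array into the matrix starting at that element, and the matching get(row, col, data) reads it back.

private static void putByteArrayExample() {
    // A 2x3 single-channel 8-bit Mat; six bytes cover the whole matrix.
    Mat m = new Mat(2, 3, CvType.CV_8UC1);
    byte[] src = { 10, 20, 30, 40, 50, 60 };
    m.put(0, 0, src); // write starting at row 0, col 0
    byte[] dst = new byte[6];
    m.get(0, 0, dst); // read the same region back
    System.out.println(java.util.Arrays.toString(dst)); // [10, 20, 30, 40, 50, 60]
}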
From source file:classes.TextRecognitionPreparer.java
static Mat replaceColor(Mat image, Scalar color1, Scalar color2) {
    Mat replaced = image.clone();
    for (int y = 0; y < image.rows(); y++) {
        for (int x = 0; x < image.cols(); x++) {
            double[] values = image.get(y, x);
            double r = values[0];
            double g = values[1];
            double b = values[2];
            if (b == color1.val[0] && g == color1.val[1] && r == color1.val[2]) {
                values[0] = color2.val[2];
                values[1] = color2.val[1];
                values[2] = color2.val[0];
            }
            replaced.put(y, x, values);
        }
    }
    return replaced;
}
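A brief, hedged calling sketch for replaceColor above. The colors are illustrative only; using pure white and pure black sidesteps the method's channel-order quirks, and the Mat is assumed to be an 8-bit, 3-channel image.

private static Mat demoReplaceColor(Mat image) {
    // Hypothetical usage: turn pure-white pixels black; equal channel values avoid BGR/RGB ordering issues.
    return replaceColor(image, new Scalar(255, 255, 255), new Scalar(0, 0, 0));
}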
From source file:classes.TextRecognitionPreparer.java
private static Mat reduceColor(Mat image, int div) {
    Mat result = new Mat(image.size(), image.type());
    int rows = image.rows(); // number of lines
    int cols = image.cols(); // number of elements per line
    for (int j = 0; j < rows; j++) {
        for (int i = 0; i < cols; i++) {
            double[] data = image.get(j, i);
            for (int k = 0; k < 3; k++) {
                data[k] = ((int) data[k] / div) * div + div / 2;
            }
            int put = result.put(j, i, data);
        }
    }
    return result;
}
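A hedged usage sketch for reduceColor above; the file names are placeholders, and it assumes the OpenCV 3.x Imgcodecs API with the native library already loaded.

private static void demoReduceColor() {
    Mat src = Imgcodecs.imread("input.png"); // placeholder path; loads an 8-bit BGR image
    Mat posterised = reduceColor(src, 64);   // quantise each channel into steps of 64
    Imgcodecs.imwrite("posterised.png", posterised);
}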
From source file:classes.Util.java
public static Mat img2Mat(BufferedImage in) {
    int width = in.getWidth();
    int height = in.getHeight();
    Mat out;
    byte[] data;
    int r, g, b;
    if (in.getType() == BufferedImage.TYPE_INT_RGB) {
        out = new Mat(height, width, CvType.CV_8UC3);
        data = new byte[width * height * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            // Channels are written in R, G, B order.
            data[i * 3] = (byte) ((dataBuff[i] >> 16) & 0xFF);
            data[i * 3 + 1] = (byte) ((dataBuff[i] >> 8) & 0xFF);
            data[i * 3 + 2] = (byte) (dataBuff[i] & 0xFF);
        }
    } else {
        out = new Mat(height, width, CvType.CV_8UC1);
        data = new byte[width * height * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            // Keep the channel values as ints in the 0-255 range before weighting;
            // casting to byte first would make values above 127 go negative.
            r = (dataBuff[i] >> 16) & 0xFF;
            g = (dataBuff[i] >> 8) & 0xFF;
            b = dataBuff[i] & 0xFF;
            data[i] = (byte) ((0.21 * r) + (0.71 * g) + (0.07 * b)); // luminosity
        }
    }
    out.put(0, 0, data);
    return out;
}
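A hedged calling sketch for img2Mat above; the file name is a placeholder, and the image is assumed to decode as TYPE_INT_RGB (anything else falls into the grayscale branch).

private static Mat loadWithImg2Mat() throws java.io.IOException {
    java.awt.image.BufferedImage img = javax.imageio.ImageIO.read(new java.io.File("text.png")); // placeholder path
    Mat mat = Util.img2Mat(img);
    System.out.println("Converted to " + mat.rows() + "x" + mat.cols() + " with " + mat.channels() + " channel(s)");
    return mat;
}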
From source file:cmib_4_4.FeatureVector.java
public Mat removeNoisePixcels(Mat noiseGreyImage, Mat rbgImage) {
    Mat image = noiseGreyImage;
    Mat image1 = rbgImage;
    int size = (int) (image.total() * image.channels());
    byte[] get = new byte[size];
    byte[] temp1 = new byte[size];
    int size1 = (int) (image1.total() * image1.channels());
    byte[] rgb1 = new byte[size1];
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, get);
            if (get[0] == -1) {
                image.put(i, j, 1);
            }
            image.get(i, j, get);
            //System.out.println(get[0]);
            if (get[0] == 1) {
                if (i == 0 & j == 0) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i + 1, j + 1, a2); image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 0.0); //System.out.println("1");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0 & j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j - 1, a1); image.get(i + 1, j - 1, a2); image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("2");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows() & j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("3");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0 & i == image.rows()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i - 1, j + 1, a2); image.get(i - 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("4");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0) {
                    double[] a1 = image.get(i - 1, j); double[] a2 = image.get(i - 1, j + 1);
                    double[] a3 = image.get(i, j + 1); double[] a4 = image.get(i + 1, j + 1);
                    double[] a5 = image.get(i + 1, j);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("5");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i, j - 1, a1); image.get(i + 1, j - 1, a2); image.get(i + 1, j, a3);
                    image.get(i + 1, j + 1, a4); image.get(i, j + 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("6");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4); image.get(i + 1, j, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("7");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i - 1, j + 1, a2); image.get(i - 1, j, a3);
                    image.get(i - 1, j - 1, a4); image.get(i, j - 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("8");
                    } else {
                        image.put(i, j, 0);
                    }
                } else {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1]; byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1]; byte[] a6 = new byte[1]; byte[] a7 = new byte[1]; byte[] a8 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4); image.get(i + 1, j, a5); image.get(i + 1, j + 1, a6);
                    image.get(i, j + 1, a7); image.get(i - 1, j + 1, a8);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == 1 & a4[0] == -1 & a5[0] == -1) | (a4[0] == -1 & a5[0] == -1 & a6[0] == -1)
                            | (a5[0] == -1 & a6[0] == 1 & a7[0] == -1) | (a6[0] == -1 & a7[0] == -1 & a8[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("9");
                    } else {
                        image.put(i, j, 0);
                    }
                }
            }
        }
    }
    ////////////////////// Obtain RGB final blood cell image /////////////////////////////
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, temp1);
            image1.get(i, j, rgb1);
            if (temp1[0] == -1) {
                image.put(i, j, 1);
            }
            image.get(i, j, temp1);
            //System.out.println(temp1[0]);
            byte r = (byte) (rgb1[0] * temp1[0]);
            byte g = (byte) (rgb1[1] * temp1[0]);
            byte b = (byte) (rgb1[2] * temp1[0]);
            image1.put(i, j, new byte[] { r, g, b });
        }
    }
    /////////////////////////////////////////////////////////////////////////////////////
    return image1;
}
From source file:cmib_4_4.NoiseRemove.java
public Mat removeNoisePixcels(Mat noiseGreyImage, Mat rbgImage) {
    Mat image = noiseGreyImage;
    Mat image1 = rbgImage;
    int size = (int) (image.total() * image.channels());
    byte[] get = new byte[size];
    byte[] temp1 = new byte[size];
    int size1 = (int) (image1.total() * image1.channels());
    byte[] rgb1 = new byte[size1];
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, get);
            if (get[0] == -1) {
                image.put(i, j, 1);
            }
            image.get(i, j, get);
            //System.out.println(get[0]);
            if (get[0] == 1) {
                if (i == 0 & j == 0) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i + 1, j + 1, a2); image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 0.0); //System.out.println("1");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0 & j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j - 1, a1); image.get(i + 1, j - 1, a2); image.get(i + 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("2");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows() & j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("3");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0 & i == image.rows()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i - 1, j + 1, a2); image.get(i - 1, j, a3);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("4");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == 0) {
                    double[] a1 = image.get(i - 1, j); double[] a2 = image.get(i - 1, j + 1);
                    double[] a3 = image.get(i, j + 1); double[] a4 = image.get(i + 1, j + 1);
                    double[] a5 = image.get(i + 1, j);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("5");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == 0) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i, j - 1, a1); image.get(i + 1, j - 1, a2); image.get(i + 1, j, a3);
                    image.get(i + 1, j + 1, a4); image.get(i, j + 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("6");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (j == image.cols()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4); image.get(i + 1, j, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("7");
                    } else {
                        image.put(i, j, 0);
                    }
                } else if (i == image.rows()) {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1];
                    byte[] a4 = new byte[1]; byte[] a5 = new byte[1];
                    image.get(i, j + 1, a1); image.get(i - 1, j + 1, a2); image.get(i - 1, j, a3);
                    image.get(i - 1, j - 1, a4); image.get(i, j - 1, a5);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == -1 & a4[0] == -1 & a5[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("8");
                    } else {
                        image.put(i, j, 0);
                    }
                } else {
                    byte[] a1 = new byte[1]; byte[] a2 = new byte[1]; byte[] a3 = new byte[1]; byte[] a4 = new byte[1];
                    byte[] a5 = new byte[1]; byte[] a6 = new byte[1]; byte[] a7 = new byte[1]; byte[] a8 = new byte[1];
                    image.get(i - 1, j, a1); image.get(i - 1, j - 1, a2); image.get(i, j - 1, a3);
                    image.get(i + 1, j - 1, a4); image.get(i + 1, j, a5); image.get(i + 1, j + 1, a6);
                    image.get(i, j + 1, a7); image.get(i - 1, j + 1, a8);
                    if ((a1[0] == -1 & a2[0] == -1 & a3[0] == -1) | (a2[0] == -1 & a3[0] == -1 & a4[0] == -1)
                            | (a3[0] == 1 & a4[0] == -1 & a5[0] == -1) | (a4[0] == -1 & a5[0] == -1 & a6[0] == -1)
                            | (a5[0] == -1 & a6[0] == 1 & a7[0] == -1) | (a6[0] == -1 & a7[0] == -1 & a8[0] == -1)) {
                        //image1.put(j, i, 1.0); //System.out.println("9");
                    } else {
                        image.put(i, j, 0);
                    }
                }
            }
        }
    }
    ////////////////////// Obtain RGB final blood cell image /////////////////////////////
    for (int i = 0; i < image.rows(); i++) {
        for (int j = 0; j < image.cols(); j++) {
            image.get(i, j, temp1);
            image1.get(i, j, rgb1);
            if (temp1[0] == -1) {
                image.put(i, j, 1);
            }
            image.get(i, j, temp1);
            byte r = (byte) (rgb1[0] * temp1[0]);
            byte g = (byte) (rgb1[1] * temp1[0]);
            byte b = (byte) (rgb1[2] * temp1[0]);
            image1.put(i, j, new byte[] { r, g, b });
        }
    }
    return image1;
}
From source file:cn.xiongyihui.webcam.JpegFactory.java
License:Open Source License
public void onPreviewFrame(byte[] data, Camera camera) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);
    mJpegOutputStream.reset();
    try {
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;
        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; //Just to account for any minor variations.
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");

        //Scale the actual features of the image
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;
            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }
            this.globalClass.setFlag(1);
        }
        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");

        long timeBegin = (int) System.currentTimeMillis();
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);

        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();
        Scalar color = new Scalar(0, 0, 0);
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);

        //BRUTEFORCE_HAMMING apparently finds even the suspicious feature-points!
        //So, inliers and outliers can turn out to be a problem.
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);
        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));

        DMatch[] matchesArray = matches.toArray();
        double minimumMatchDistance = globalClass.getHammingDistance();
        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;
        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
        Point point;
        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();
        double[] colorValue;
        double red, green, blue;
        int[] featureCount;
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;
        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;

        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                //MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;
                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);
                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];
                int xKernelFeature, yKernelFeature;
                //Color feedback
                if ((rLow < red) & (red < rHigh) & (gLow < green) & (green < gHigh) & (bLow < blue)
                        & (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;
                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }

        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }
        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;

        if ((Double.isNaN(xImageCentroid)) | (Double.isNaN(yImageCentroid))) {
            //Log.e(TAG, "Centroid is not getting detected! Increasing hamming distance (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            //Log.e(TAG, "Centroid is getting detected! Decreasing and optimising hamming (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);
            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }
            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);
            Log.e(TAG, Arrays.deepToString(kernelSum));
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) { //Meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }
            Log.e(TAG, Arrays.deepToString(mask));
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }
            double xMaskMean = xMaskFeatureSum / maskedFeatureCount;
            double yMaskMean = yMaskFeatureSum / maskedFeatureCount;
            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }
            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);

            /*//Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
            if (yImageCentroid > yTemplateCentroid) { throttlePWM = throttlePWM + deltaThrottle; } else { throttlePWM = throttlePWM - deltaThrottle; }
            if (RMSimage > distanceTemplateFeatures) { pitchPWM = pitchPWM + deltaPitch; } else { pitchPWM = pitchPWM - deltaPitch; }
            if (xImageCentroid > xTemplateCentroid) { yawPWM = yawPWM + deltaYaw; } else { yawPWM = yawPWM - deltaYaw; }
            if (1000 > throttlePWM) { throttlePWM = 1000; }
            if (2000 < throttlePWM) { throttlePWM = 2000; }
            if (1000 > pitchPWM) { pitchPWM = 1000; }
            if (2000 < pitchPWM) { pitchPWM = 2000; }
            if (1000 > yawPWM) { yawPWM = 1000; }
            if (2000 < yawPWM) { yawPWM = 2000; }
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM);*/

            //Display bounding circle
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;
            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);
            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }
            if ((Double.isNaN(xImageCentroid)) | Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }
            if (initialisationFlag == 1) {
                //int displayRadius = 50;
                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }
        }
        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*try {
            //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
            Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        } catch (Exception e) {}*/
        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");
        //Log.e(TAG, Arrays.toString(matchesArray));
        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }
        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + "milli-seconds!");
        mJpegData = mJpegOutputStream.toByteArray();
        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }
}
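The Mat.put() step at the heart of the frame callback above can be isolated into a small helper. This is a hedged sketch rather than part of the original class: the method name is ours, and width/height must match the NV21 preview size.

private static Mat nv21ToRgb(byte[] data, int width, int height) {
    Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1); // NV21: Y plane followed by interleaved VU
    yuv.put(0, 0, data);                                           // copy the whole preview buffer at once
    Mat rgb = new Mat();
    Imgproc.cvtColor(yuv, rgb, Imgproc.COLOR_YUV420sp2RGB);
    return rgb;
}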
From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java
License:Apache License
/**
 * Create a camera intrinsic matrix using input parameters
 *
 * The camera intrinsic matrix will be like:
 *
 *          +-                      -+
 *          | f    0   center.width  |
 *      A = | 0    f   center.height |
 *          | 0    0   1             |
 *          +-                      -+
 *
 * @return An approximated (not actually calibrated) camera matrix
 */
private static Mat cameraMatrix(float f, Size center) {
    final double[] data = { f, 0, center.width, 0, f, center.height, 0, 0, 1f };
    Mat m = new Mat(3, 3, CvType.CV_64F);
    m.put(0, 0, data);
    return m;
}
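A short, hedged usage sketch for cameraMatrix above; the frame size and focal length are made-up values for illustration only.

private static void demoCameraMatrix() {
    Size principalPoint = new Size(640 / 2, 480 / 2); // assumed 640x480 frame, principal point at the centre
    Mat K = cameraMatrix(525.0f, principalPoint);     // 525 px is an illustrative focal length
    System.out.println(K.dump());
}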
From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java
License:Apache License
private static Mat quat2rpy(Mat quat) {
    double[] q = new double[4];
    quat.get(0, 0, q);

    double[] rpy = { Math.atan2(2 * (q[0] * q[1] + q[2] * q[3]), 1 - 2 * (q[1] * q[1] + q[2] * q[2])),
            Math.asin(2 * (q[0] * q[2] - q[3] * q[1])),
            Math.atan2(2 * (q[0] * q[3] + q[1] * q[2]), 1 - 2 * (q[2] * q[2] + q[3] * q[3])) };

    Mat rpym = new Mat(3, 1, CvType.CV_64F);
    rpym.put(0, 0, rpy);
    return rpym;
}
From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java
License:Apache License
private static Mat rodr2quat(Mat rodr) {
    double t = Core.norm(rodr);
    double[] r = new double[3];
    rodr.get(0, 0, r);

    double[] quat = { Math.cos(t / 2), Math.sin(t / 2) * r[0] / t, Math.sin(t / 2) * r[1] / t,
            Math.sin(t / 2) * r[2] / t };
    Mat quatm = new Mat(4, 1, CvType.CV_64F);
    quatm.put(0, 0, quat);
    return quatm;
}
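A hedged sketch chaining the two converters above: a Rodrigues rotation vector is packed into a Mat with put(), turned into a quaternion, then into roll/pitch/yaw. The rotation value is arbitrary and the method name is ours.

private static void demoRotationConversions() {
    Mat rodr = new Mat(3, 1, CvType.CV_64F);
    rodr.put(0, 0, new double[] { 0, 0, Math.PI / 2 }); // roughly a 90-degree rotation about the z-axis
    Mat quat = rodr2quat(rodr);                         // 4x1 quaternion
    Mat rpy = quat2rpy(quat);                           // 3x1 roll/pitch/yaw
    System.out.println(rpy.dump());
}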
From source file:com.astrocytes.core.ImageHelper.java
License:Open Source License
public static Mat convertBufferedImageToMat(BufferedImage in) {
    byte[] pixels = ((DataBufferByte) in.getRaster().getDataBuffer()).getData();
    int type = CvType.CV_8UC3;
    if (in.getType() == BufferedImage.TYPE_BYTE_GRAY) {
        type = CvType.CV_8UC1;
    }
    Mat out = new Mat(in.getHeight(), in.getWidth(), type);
    out.put(0, 0, pixels);
    return out;
}
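A hedged round-trip sketch for the converter above; the file name is a placeholder, and the BufferedImage must be backed by a byte DataBuffer (e.g. TYPE_3BYTE_BGR or TYPE_BYTE_GRAY) for the cast inside the method to succeed.

private static Mat loadAsMat() throws java.io.IOException {
    java.awt.image.BufferedImage img = javax.imageio.ImageIO.read(new java.io.File("cells.png")); // placeholder path
    Mat mat = ImageHelper.convertBufferedImageToMat(img);
    System.out.println("Mat size: " + mat.rows() + " x " + mat.cols());
    return mat;
}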