List of usage examples for org.opencv.core.Mat.convertTo
public void convertTo(Mat m, int rtype)
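Before the project examples below, here is a minimal, self-contained sketch of the method itself. It is not taken from any of the listed source files; the class name and values are illustrative, and it assumes the OpenCV Java bindings are installed and on the library path. It shows the two most common forms of convertTo: a plain depth change, and a scaled conversion dst = src * alpha + beta.

// Minimal sketch (assumption: OpenCV Java bindings available; ConvertToSketch is an illustrative name).
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class ConvertToSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 8-bit single-channel source filled with the value 200
        Mat src = new Mat(4, 4, CvType.CV_8UC1, new Scalar(200));

        // 1) Change depth only: CV_8U -> CV_32F, values copied unchanged (200.0)
        Mat asFloat = new Mat();
        src.convertTo(asFloat, CvType.CV_32F);

        // 2) Change depth and rescale: dst = src * alpha + beta, here mapping 0..255 to 0..1
        Mat normalized = new Mat();
        src.convertTo(normalized, CvType.CV_32F, 1.0 / 255.0, 0.0);

        System.out.println("asFloat(0,0)    = " + asFloat.get(0, 0)[0]);    // 200.0
        System.out.println("normalized(0,0) = " + normalized.get(0, 0)[0]); // ~0.784
    }
}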
From source file:com.trandi.opentld.tld.Tld.java
License:Apache License
/**
 * Output: resized zero-mean patch/pattern
 *
 * @param inImg INPUT, outPattern OUTPUT
 * @return stdev
 */
private static double resizeZeroMeanStdev(final Mat inImg, Mat outPattern, int patternSize) {
    if (inImg == null || outPattern == null) {
        return -1;
    }

    Imgproc.resize(inImg, outPattern, new Size(patternSize, patternSize));
    final MatOfDouble mean = new MatOfDouble();
    final MatOfDouble stdev = new MatOfDouble();
    Core.meanStdDev(outPattern, mean, stdev);
    outPattern.convertTo(outPattern, CvType.CV_32F);
    Core.subtract(outPattern, new Scalar(mean.toArray()[0]), outPattern);

    return stdev.toArray()[0];
}
From source file:com.wallerlab.compcellscope.calcDPCTask.java
License:BSD License
protected Long doInBackground(Mat... matrix_list) {
    //int count = urls.length;
    Mat in1 = matrix_list[0];
    Mat in2 = matrix_list[1];
    Mat outputMat = matrix_list[2];

    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);

    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Normalize to 0-2.0
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Normalize to 0-255
    dpcImgF.convertTo(output, CvType.CV_8UC1);

    // Convert back into RGB
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(outputMat, maskedImg);
    output.release();
    maskedImg.release();
    return null;
}
From source file:com.wallerlab.compcellscope.MultiModeViewActivity.java
License:BSD License
public Mat calcDPC(Mat in1, Mat in2, Mat out) {
    Mat Mat1 = new Mat(in1.width(), in1.height(), in1.type());
    Mat Mat2 = new Mat(in2.width(), in2.height(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.width(), Mat1.height(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.width(), Mat1.height(), CvType.CV_32FC1);

    /*
    Log.d(TAG,String.format("Mat1 format is %.1f-%.1f, type: %d",Mat1.size().width,Mat1.size().height,Mat1.type()));
    Log.d(TAG,String.format("Mat2 format is %.1f-%.1f, type: %d",Mat2.size().width,Mat2.size().height,Mat2.type()));
    */

    // Convert to Floats
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);

    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF); // Normalize to 0-2.0
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF); // Normalize to 0-255
    dpcImgF.convertTo(output, CvType.CV_8UC1);

    // Convert back into RGB
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(out, maskedImg);
    output.release();
    maskedImg.release();
    return out;
}
From source file:com.wallerlab.processing.tasks.ComputeRefocusTask.java
License:BSD License
private Bitmap[] computeFocus(float z) {
    int width = mDataset.WIDTH - 2 * mDataset.XCROP;
    int height = mDataset.HEIGHT - 2 * mDataset.YCROP;

    Mat result = new Mat(height, width, CvType.CV_32FC4);
    Mat result8 = new Mat(height, width, CvType.CV_8UC4);

    Mat dpc_result_tb = new Mat(height, width, CvType.CV_32FC4);
    Mat dpc_result_tb8 = new Mat(height, width, CvType.CV_8UC4);
    Mat dpc_result_lr = new Mat(height, width, CvType.CV_32FC4);
    Mat dpc_result_lr8 = new Mat(height, width, CvType.CV_8UC4);

    Mat img;
    Mat img32 = new Mat(height, width, CvType.CV_32FC4);
    Mat shifted;

    for (int idx = 0; idx < mDataset.fileCount; idx++) {
        img = ImageUtils.toMat(BitmapFactory.decodeByteArray(fileByteList[idx], 0, fileByteList[idx].length));
        img = img.submat(mDataset.YCROP, mDataset.HEIGHT - mDataset.YCROP, mDataset.XCROP,
                mDataset.WIDTH - mDataset.XCROP);
        img.convertTo(img32, result.type());

        // Grab actual hole number from filename
        String fName = mDataset.fileList[idx].toString();
        String hNum = fName.substring(fName.indexOf("_scanning_") + 10, fName.indexOf(".jpeg"));
        int holeNum = Integer.parseInt(hNum);
        //Log.d(TAG,String.format("BF Scan Header is: %s", hNum));

        // Calculate these based on array coordinates
        int xShift = (int) Math.round(z * tanh_lit[holeNum]);
        int yShift = (int) Math.round(z * tanv_lit[holeNum]);

        shifted = ImageUtils.circularShift(img32, yShift, xShift);

        if (mDataset.leftList.contains(holeNum)) { // add LHS
            Core.add(dpc_result_lr, shifted, dpc_result_lr);
        } else { // subtract RHS
            Core.subtract(dpc_result_lr, shifted, dpc_result_lr);
        }

        if (mDataset.topList.contains(holeNum)) { // add Top
            Core.add(dpc_result_tb, shifted, dpc_result_tb);
        } else { // subtract bottom
            Core.subtract(dpc_result_tb, shifted, dpc_result_tb);
        }

        Core.add(result, shifted, result);

        float progress = ((idx + 1) / (float) mDataset.fileCount);
        onProgressUpdate((int) (progress * 100), -1);
        Log.d(TAG, String.format("progress: %f", progress));
    }

    Core.MinMaxLocResult minMaxLocResult1 = Core.minMaxLoc(result.reshape(1));
    result.convertTo(result8, CvType.CV_8UC4, 255 / minMaxLocResult1.maxVal);

    Core.MinMaxLocResult minMaxLocResult2 = Core.minMaxLoc(dpc_result_lr.reshape(1));
    dpc_result_lr.convertTo(dpc_result_lr8, CvType.CV_8UC4,
            255 / (minMaxLocResult2.maxVal - minMaxLocResult2.minVal),
            -minMaxLocResult2.minVal * 255.0 / (minMaxLocResult2.maxVal - minMaxLocResult2.minVal));

    Core.MinMaxLocResult minMaxLocResult3 = Core.minMaxLoc(dpc_result_tb.reshape(1));
    dpc_result_tb.convertTo(dpc_result_tb8, CvType.CV_8UC4,
            255 / (minMaxLocResult3.maxVal - minMaxLocResult3.minVal),
            -minMaxLocResult3.minVal * 255.0 / (minMaxLocResult3.maxVal - minMaxLocResult3.minVal));

    /*
    Log.d(TAG,String.format("result_min: %f, max: %f",minMaxLocResult1.minVal,minMaxLocResult1.maxVal));
    Log.d(TAG,String.format("lr_min: %f, max: %f",minMaxLocResult2.minVal,minMaxLocResult2.maxVal));
    Log.d(TAG,String.format("tb_min: %f, max: %f",minMaxLocResult3.minVal,minMaxLocResult3.maxVal));
    */

    // remove transparency in DPC images
    Scalar alphaMask = new Scalar(new double[] { 1.0, 1.0, 1.0, 255.0 });
    Core.multiply(dpc_result_lr8, alphaMask, dpc_result_lr8);
    Core.multiply(dpc_result_tb8, alphaMask, dpc_result_tb8);

    if (!mDataset.USE_COLOR_DPC) {
        Imgproc.cvtColor(dpc_result_lr8, dpc_result_lr8, Imgproc.COLOR_BGR2GRAY);
        Imgproc.cvtColor(dpc_result_tb8, dpc_result_tb8, Imgproc.COLOR_BGR2GRAY);
    }

    /*
    // Cut off edges in DPC images
    Point centerPt = new Point();
    centerPt.x = Math.round((float)width/2.0);
    centerPt.y = Math.round((float)height/2.0);
    Mat circleMat = new Mat(dpc_result_lr8.size(), dpc_result_lr8.type());
    Scalar color = new Scalar(255);
    Core.circle(circleMat, centerPt, 200, color);
    //Core.bitwise_and(circleMat, dpc_result_lr8, dpc_result_lr8);
    //Core.bitwise_and(circleMat, dpc_result_tb8, dpc_result_tb8);
    */

    Bitmap[] outputBitmaps = new Bitmap[3];
    outputBitmaps[0] = ImageUtils.toBitmap(result8);
    outputBitmaps[1] = ImageUtils.toBitmap(dpc_result_lr8);
    outputBitmaps[2] = ImageUtils.toBitmap(dpc_result_tb8);
    return outputBitmaps;
}
From source file:cpsd.ImageGUI.java
private void AnalyzeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_AnalyzeButtonActionPerformed
    try {
        double pixelArea = 1;
        double imageSize = 1;
        if (magnification == 50)
            pixelArea = 1.2996;
        else if (magnification == 100)
            pixelArea = 0.329476;
        else if (magnification == 200)
            pixelArea = 0.08162;
        else {
            imageSize = pow(10, 10) * pow(magnification, -2);
            pixelArea = (imageSize) / (resolution1 * resolution2);
        }

        Mat source = ImageClass.getInstance().getImage();
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        //Imgproc.adaptiveThreshold(source,destination,255,ADAPTIVE_THRESH_GAUSSIAN_C,THRESH_BINARY_INV,13,2);
        //Imgproc.GaussianBlur(destination,destination,new org.opencv.core.Size(0,0),5);
        threshold(source, destination, 30, 255, CV_THRESH_BINARY);
        distanceTransform(destination, destination, CV_DIST_L2, 3);
        normalize(destination, destination, 0, 1, NORM_MINMAX);
        threshold(destination, destination, 0.5, 1, CV_THRESH_BINARY);
        destination.convertTo(destination, CV_8U);
        /*ImageClass.getInstance().setImage(destination);
        displayImage();*/

        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        MatOfInt4 hierarchy = new MatOfInt4();
        // Rect roi = new Rect(100,100,destination.cols()-100,destination.rows()-100);
        // Mat imageROI = destination.submat(roi);
        /*ImageClass.getInstance().setImage(imageROI);
        displayImage();*/
        //Imgproc.Canny(source,destination,0.05,0.15);
        Imgproc.findContours(destination, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
        int total = contours.size();
        int count = 0;
        //System.out.println(total);
        MatOfPoint[] cntrs = contours.toArray(new MatOfPoint[contours.size()]);
        ArrayList<Double> value = new ArrayList<Double>();
        double temp = 0;
        for (int i = 0; i < contours.size(); i++) {
            if (contourArea(cntrs[i]) > 1/*&& contourArea(cntrs[i])<3000*/) {
                temp = 2 * Math.sqrt((contourArea(cntrs[i]) * (pixelArea)) / Math.PI);
                //temp = contourArea(cntrs[i]);
                if (temp > 0) {
                    value.add(count, temp);
                    System.out.println("area of contour " + count++ + " is : " + contourArea(cntrs[i]));
                }
            }
        }
        System.out.println("total number of contours : " + count);
        double[] values = new double[count];
        for (int i = 0; i < count; i++) {
            //temp = value.get(i);//2*Math.sqrt((value.get(i)*127.024)/Math.PI);
            //if(temp>0)
            values[i] = value.get(i);
            //System.out.println("the diameter of particle "+i+ "is "+values[i]);
            // values[i]=(contourArea(cntrs[i])*127.024)/100;
        }
        //int number = 300;
        /*HistogramDataset dataset = new HistogramDataset();
        dataset.setType(HistogramType.FREQUENCY);
        XYSeries series = new XYSeries("Particle Size Distribution");
        try{
            dataset.addSeries("Histogram1",values,number,0,300);
            for(int i=0;i<300;i++){
                if(dataset.getYValue(0,i)>0)
                    series.add(dataset.getXValue(0,i),dataset.getYValue(0,i));
            }
            // XYDataset xydataset = new XYSeriesCollection(series);
        }catch(Exception e) {
            e.printStackTrace();
        }
        XYDataset xydataset = new XYSeriesCollection(series);
        String plotTitle = "Particle Size Distribution";
        String xAxis = "particle diameter in microns";
        String yAxis = "particle count";
        PlotOrientation orientation = PlotOrientation.VERTICAL;
        boolean show = true;
        boolean toolTips = true;
        boolean urls = false;
        JFreeChart chart1 = ChartFactory.createXYLineChart(plotTitle,xAxis,yAxis,xydataset,orientation,show,toolTips,urls);
        JFreeChart chart2 = ChartFactory.createHistogram(plotTitle,xAxis,yAxis,dataset,orientation,show,toolTips,urls);
        int width1 = 500;
        int height1 = 500;
        ChartFrame frame1 = new ChartFrame("Coal PSD",chart1);
        frame1.setSize(width1,height1);
        frame1.setVisible(true);
        frame1.setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        int width2 = 500;
        int height2 = 500;
        ChartFrame frame2 = new ChartFrame("Coal PSD",chart2);
        frame2.setSize(width2,height2);
        frame2.setVisible(true);
        frame2.setDefaultCloseOperation(DISPOSE_ON_CLOSE);*/
    } catch (NullPointerException e) {
        System.err.println("..........Please load a valid Image..........");
    }
    // TODO add your handling code here:
}
From source file:cx.uni.jk.mms.iaip.filter.MatHelper.java
License:Open Source License
/**
 * converts any mat with 1/3/4 channels to an 8 bit BufferedImage with the
 * same number of channels. if the input mat is not CvType.CV_8U it is
 * converted to such with truncation of values to [0..255].
 *
 * @param mat
 * @return the image
 */
public static BufferedImage convertMatTo8BitBufferedImage(Mat mat) {
    Mat byteMat;
    if (mat.depth() != CvType.CV_8U) {
        /** conversion to 8 bit Mat */
        byteMat = new MatOfByte();
        mat.convertTo(byteMat, CvType.CV_8U);
    } else {
        byteMat = mat; // just a reference!
    }

    /** encode to .bmp file in memory */
    MatOfByte fileMat = new MatOfByte();
    Highgui.imencode(".bmp", byteMat, fileMat);

    /** use file as input stream for BufferedImage */
    byte[] byteArray = fileMat.toArray();
    BufferedImage bufferedImage = null;
    try {
        InputStream in = new ByteArrayInputStream(byteArray);
        bufferedImage = ImageIO.read(in);
    } catch (Exception e) {
        logger.severe(e.getStackTrace().toString());
        System.exit(e.hashCode());
    }
    return bufferedImage;
}
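A hypothetical caller for the helper above, sketched here to show the effect of the CV_8U conversion; the demo class, the gradient data, and the printed output are assumptions and not part of MatHelper. Values above 255 are clamped to 255 when convertTo produces the 8-bit Mat.

// Hypothetical usage of MatHelper.convertMatTo8BitBufferedImage
// (assumes the same OpenCV 2.4.x Java bindings used above).
import java.awt.image.BufferedImage;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

import cx.uni.jk.mms.iaip.filter.MatHelper;

public class MatHelperDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Float Mat whose values run past 255; they are clamped to [0..255] during conversion.
        Mat gradient = new Mat(256, 256, CvType.CV_32FC1);
        for (int r = 0; r < gradient.rows(); r++) {
            for (int c = 0; c < gradient.cols(); c++) {
                gradient.put(r, c, r + c);
            }
        }

        BufferedImage image = MatHelper.convertMatTo8BitBufferedImage(gradient);
        System.out.println("BufferedImage: " + image.getWidth() + "x" + image.getHeight());
    }
}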
From source file:emotion.Eye.java
public void examineEyeOpeness(boolean rightEyeFlag) {
    Rect pureEyeRegion;
    //We take just middle half of strict eye region determined
    //by localized eye corners
    if (rightEyeFlag) {
        double regionWidth = EyeRegion.rightOuterEyeCorner.x - EyeRegion.rightInnerEyeCorner.x;
        pureEyeRegion = new Rect((int) (EyeRegion.rightInnerEyeCorner.x + regionWidth / 2 - 2),
                (int) (Eye.rightRect.y), (4), Eye.rightRect.height);
        imwrite("strictEyeRegRight.jpg", new Mat(EyeRegion._face, pureEyeRegion));

        //Setting x coordinates of eyelids
        EyeRegion.rightLowerEyelid.x = (EyeRegion.rightOuterEyeCorner.x + EyeRegion.rightInnerEyeCorner.x) / 2;
        EyeRegion.rightUpperEyelid.x = EyeRegion.rightLowerEyelid.x;

        EyeRegion.rightEyeOpeness = (EyeRegion.rightUpperEyelid.y - EyeRegion.rightLowerEyelid.y);
    } else {
        double regionWidth;
        regionWidth = EyeRegion.leftInnerEyeCorner.x - EyeRegion.leftOuterEyeCorner.x;
        pureEyeRegion = new Rect((int) (regionWidth / 2 + EyeRegion.leftOuterEyeCorner.x - 2),
                (int) (Eye.leftRect.y), (4), Eye.leftRect.height);
        imwrite("leftEyeReg.jpg", new Mat(EyeRegion._face, pureEyeRegion));

        //Setting x coordinates of eyelids
        EyeRegion.leftLowerEyelid.x = (EyeRegion.leftInnerEyeCorner.x + EyeRegion.leftOuterEyeCorner.x) / 2;
        EyeRegion.leftUpperEyelid.x = EyeRegion.leftLowerEyelid.x;

        EyeRegion.leftEyeOpeness = (EyeRegion.leftUpperEyelid.y - EyeRegion.leftLowerEyelid.y);
    }

    Mat strictEyeRegion = new Mat(EyeRegion._face, pureEyeRegion);
    Mat result = new Mat();
    strictEyeRegion.convertTo(strictEyeRegion, CvType.CV_32F);
    Core.pow(strictEyeRegion, 1.27, strictEyeRegion);
    cvtColor(strictEyeRegion, strictEyeRegion, Imgproc.COLOR_BGR2GRAY);
    imwrite("improved.jpg", strictEyeRegion);
    threshold(strictEyeRegion, result, 100, 255, Imgproc.THRESH_BINARY_INV);

    Mat strEl = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 1));
    dilate(result, result, strEl, new Point(1, 0), 3);

    for (int i = 0; i < result.width(); i++) {
        for (int j = 0; j < result.height() * 0.4; j++) {
            result.put(j, i, new double[] { 0, 0, 0 });
        }
    }

    for (int j = result.height() - 1; j >= 0; j--) {
        if (result.get(j, 0)[0] == 255) {
            if (rightEyeFlag) {
                if (EyeRegion.rightLowerEyelid.y == 0) {
                    EyeRegion.rightLowerEyelid.y = j + 3;
                    EyeRegion.rightLowerEyelid.y += Eye.rightRect.y;
                }
                EyeRegion.rightUpperEyelid.y = j;
                EyeRegion.rightUpperEyelid.y += Eye.rightRect.y;
            } else {
                if (EyeRegion.leftLowerEyelid.y == 0) {
                    EyeRegion.leftLowerEyelid.y = j + 3;
                    EyeRegion.leftLowerEyelid.y += Eye.leftRect.y;
                }
                EyeRegion.leftUpperEyelid.y = j;
                EyeRegion.leftUpperEyelid.y += Eye.leftRect.y;
            }
        }
    }
    imwrite("openessResult.jpg", result);
}
From source file:emotion.Eyebrow.java
public Eyebrow(EyeRegion eyeReg, boolean rightEyeFlag) {
    this.reg = eyeReg;
    Mat eye = rightEyeFlag ? Eye.rightEye.clone() : Eye.leftEye.clone();
    Mat eyebrowROI = eye.clone();
    //cvtColor(eyebrowROI, eyebrowROI, Imgproc.COLOR_BGR2GRAY);
    eyebrowROI.convertTo(eyebrowROI, CvType.CV_32F);

    // Vector<Mat> channels=new Vector<>();
    // split(eyebrowROI,channels);
    // imwrite("eyebrowROI.png", channels.get(0));

    Mat result = StaticFunctions.gabor(eyebrowROI);
    //threshold(result, result, 200,255, Imgproc.THRESH_BINARY_INV);
    imwrite("intermidiate.png", result);
    Harris(result, rightEyeFlag);
    imwrite("eyeafterGabor.png", result);
}
From source file:emotion.Eyebrow.java
public static void Harris(Mat img, boolean rightEyeFlag) {
    //Harris point extraction
    Mat harrisTestimg;
    harrisTestimg = img.clone();
    cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY);
    threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV);
    Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3));
    erode(harrisTestimg, harrisTestimg, struct);
    dilate(harrisTestimg, harrisTestimg, struct);
    imwrite("intermediateHaaris.jpg", harrisTestimg);

    harrisTestimg.convertTo(harrisTestimg, CV_8UC1);
    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

    //System.out.println("Average Y for contours:");
    float[] averageY = new float[contours.size()];
    for (int i = 0; i < contours.size(); ++i) {
        //We calculate mean of Y coordinates for each contour
        for (int j = 0; j < contours.get(i).total(); ++j) {
            int val = (int) contours.get(i).toArray()[j].y;
            averageY[i] += val;
        }
        averageY[i] /= contours.get(i).total();
        //System.out.println(i+") "+averageY[i]);

        if (averageY[i] <= img.height() / 2 && //We consider just up half of an image
                contours.get(i).total() >= img.width()) //and longer than threshold
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255));
        else
            Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0));
    }

    MatOfPoint features = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0);

    //We draw just 2 extreme points- first and last
    Point eyebrowsPoints[] = new Point[2];
    for (int i = 0; i < features.toList().size(); i++) {
        if (i == 0) {
            eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0);
            eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0);
        }
        if (features.toArray()[i].x < eyebrowsPoints[0].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[0] = features.toArray()[i];
        }
        if (features.toArray()[i].x > eyebrowsPoints[1].x
                && features.toArray()[i].y < harrisTestimg.height() / 2) {
            eyebrowsPoints[1] = features.toArray()[i];
        }
    }
    StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS);
    StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS);
    imwrite("testHaris.jpg", img);

    if (rightEyeFlag) {
        EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y;

        EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x;
        EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y;
    } else {
        EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1];
        EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y;

        EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0];
        EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x;
        EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y;
    }
}
From source file:emotion.EyeRegion.java
public static void areEyebrowsWrinkles() {
    //setting parameters
    int height = (int) (abs(rightInnerEyebrowsCorner.y - rightInnerEyeCorner.y) * 1.2);
    int width = (int) (rightInnerEyeCorner.x - leftInnerEyeCorner.x);
    int y = (int) (rightInnerEyebrowsCorner.y - height / 2);
    int x = (int) leftInnerEyebrowsCorner.x;

    Rect wrinklesRect = new Rect(x, y, width, height);
    Mat wrinklesArea = new Mat(_face, wrinklesRect).clone();

    wrinklesThreshold = (int) (wrinklesArea.width() * wrinklesArea.height() * 0.085);

    //Wrinkles between eyebrows are vertical
    int[] gradientMask = new int[9];
    gradientMask[0] = -1;
    gradientMask[1] = 0;
    gradientMask[2] = 1;
    gradientMask[3] = -5;
    gradientMask[4] = 0;
    gradientMask[5] = 5;
    gradientMask[6] = -1;
    gradientMask[7] = 0;
    gradientMask[8] = 1;

    wrinklesArea.convertTo(wrinklesArea, CvType.CV_32F);
    Imgproc.cvtColor(wrinklesArea, wrinklesArea, Imgproc.COLOR_BGR2GRAY);
    Core.pow(wrinklesArea, 1.09, wrinklesArea);
    imwrite("wrinklesArea.jpg", wrinklesArea);

    wrinklesArea = StaticFunctions.convolution(gradientMask, wrinklesArea);
    threshold(wrinklesArea, wrinklesArea, 110, 255, Imgproc.THRESH_BINARY);
    imwrite("wrinklesAreaGradiented.jpg", wrinklesArea);

    long wrinklesPoints = 0;
    for (int i = 0; i < wrinklesArea.width(); i++) {
        for (int j = 0; j < wrinklesArea.height(); j++) {
            if (wrinklesArea.get(j, i)[0] == 255) {
                wrinklesPoints++;
            }
        }
    }
    EyeRegion.wrinklesFactor = wrinklesPoints;
    // System.out.println("Wrinkles factor: "+wrinklesPoints);
    if (wrinklesPoints >= wrinklesThreshold) {
        // System.out.println("Expression wrinkles detected! Threshold exceeded");
        Imgproc.rectangle(EyeRegion._face, wrinklesRect.br(), wrinklesRect.tl(), new Scalar(0, 50, 205));
    }
}