List of usage examples for the org.opencv.core.Mat.height() method
public int height()
From source file:edu.wpi.cscore.RawCVMatSource.java
License:Open Source License
/** * Put an OpenCV image and notify sinks. * * <p>Only 8-bit single-channel or 3-channel (with BGR channel order) images * are supported. If the format, depth or channel order is different, use * Mat.convertTo() and/or cvtColor() to convert it first. * * @param image OpenCV image/*from ww w . j ava 2s . c om*/ */ public void putFrame(Mat image) { int channels = image.channels(); if (channels != 1 && channels != 3) { throw new VideoException("Unsupported Image Type"); } int imgType = channels == 1 ? PixelFormat.kGray.getValue() : PixelFormat.kBGR.getValue(); CameraServerJNI.putRawSourceFrame(m_handle, image.dataAddr(), image.width(), image.height(), imgType, (int) image.total() * channels); }
From source file:emotion.Eye.java
private void templatingOuterCorner(Mat eyeRegion, boolean rightEyeFlag) { // Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\" // + "Face database\\eyecorners\\rightOuter.jpg",CV_8UC1); Mat template = imread("src\\Templates\\rightOuter.jpg", CV_8UC1); Mat temp = new Mat(eyeRegion.width(), eyeRegion.height(), CV_8UC1); cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY); temp = rightEyeFlag//from w w w. j a v a 2 s . c o m ? new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height())) : new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height())); Mat result = new Mat(eyeRegion.width(), eyeRegion.height(), eyeRegion.type()); //(9,9)- coordinates of eye outerCorner in the template if (rightEyeFlag) { imwrite("rightEyeForOuterTemplating.jpg", temp); Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED); Core.normalize(result, result, 0, 100, Core.NORM_MINMAX); Core.MinMaxLocResult maxVal = Core.minMaxLoc(result); //(9,9)- coordinates of eye outerCorner in the template Point outerCorner = new Point(maxVal.maxLoc.x + 9, maxVal.maxLoc.y + 9); //Adjust coordinates according to whole face outerCorner.y += Eye.rightRect.y; outerCorner.x += Eye.rightRect.x; outerCorner.x += temp.width(); //We examine just right half on the right eye //////////////////////////////////////////// EyeRegion.rightOuterEyeCorner = outerCorner; } else { imwrite("leftEyeForOuterTemplating.jpg", temp); Core.flip(template, template, 1); Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED); Core.normalize(result, result, 0, 100, Core.NORM_MINMAX); Core.MinMaxLocResult maxVal = Core.minMaxLoc(result); Point outerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 9); //Adjust coordinates according to whole face outerCorner.y += Eye.leftRect.y; outerCorner.x += Eye.leftRect.x; //////////////////////////////////////////// EyeRegion.leftOuterEyeCorner = outerCorner; } //Mat tempw=reg._face.clone(); 
//Face.drawCross(tempw, outerCorner); //imwrite("checkcorner.png",tempw); }
From source file:emotion.Eye.java
private void templatingInnerCorner(Mat eyeRegion, boolean rightEyeFlag) { // Mat template=imread("E:\\Studia\\II YEAR\\Team Project\\" // + "Face database\\eyecorners\\rightInner.jpg",CV_8UC1); Mat template = imread("src\\Templates\\rightInner.jpg", CV_8UC1); Mat temp = new Mat(eyeRegion.width(), eyeRegion.height(), CV_8UC1); cvtColor(eyeRegion, temp, Imgproc.COLOR_BGR2GRAY); temp = rightEyeFlag ? new Mat(temp, new Rect(0, 0, (int) (temp.width() * 0.5), temp.height())) : new Mat(temp, new Rect((int) (temp.width() * 0.5), 0, (int) (temp.width() * 0.5), temp.height())); Mat result = new Mat(eyeRegion.width(), eyeRegion.height(), eyeRegion.type()); //(4,7)- coordinates of eye innerCorner in the template if (rightEyeFlag) { imwrite("template4righteye.jpg", template); imwrite("rightEyeForInnerTemplating.jpg", temp); Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED); Core.normalize(result, result, 0, 100, Core.NORM_MINMAX); Core.MinMaxLocResult maxVal = Core.minMaxLoc(result); //(4,7)- coordinates of eye innerCorner in the template Point innerCorner = new Point(maxVal.maxLoc.x + 4, maxVal.maxLoc.y + 7); StaticFunctions.drawCross(temp, innerCorner, StaticFunctions.Features.EYE_CORNERS); imwrite("rightEyeForInnerTemplating.jpg", temp); //Adjust coordinates according to whole face innerCorner.y += Eye.rightRect.y; innerCorner.x += Eye.rightRect.x; //We examine just left half on the right eye //////////////////////////////////////////// EyeRegion.rightInnerEyeCorner = innerCorner; } else {// ww w. j a va 2 s . 
c o m imwrite("leftEyeForInnerTemplating.jpg", temp); Core.flip(template, template, 1); Imgproc.matchTemplate(temp, template, result, Imgproc.TM_CCOEFF_NORMED); Core.normalize(result, result, 0, 100, Core.NORM_MINMAX); Core.MinMaxLocResult maxVal = Core.minMaxLoc(result); Point innerCorner = new Point(maxVal.maxLoc.x + 8, maxVal.maxLoc.y + 7); //Adjust coordinates according to whole face innerCorner.y += Eye.leftRect.y; innerCorner.x += Eye.leftRect.x; //We examine just right half on the left eye innerCorner.x += temp.width(); //////////////////////////////////////////// EyeRegion.leftInnerEyeCorner = innerCorner; } }
From source file:emotion.Eye.java
/**
 * Estimates how open one eye is by thresholding a narrow 4-pixel-wide
 * vertical strip centered between the previously-localized eye corners,
 * then scanning that strip bottom-up for bright (eyelid-boundary) pixels.
 * Results are written into the EyeRegion static eyelid/openness fields.
 *
 * <p>Precondition: the corresponding EyeRegion corner points and Eye
 * rectangles must already be populated by the templating steps.
 *
 * @param rightEyeFlag true to examine the right eye, false for the left
 */
public void examineEyeOpeness(boolean rightEyeFlag) {
    Rect pureEyeRegion;
    // We take just the middle of the strict eye region determined by the
    // localized eye corners: a 4-pixel-wide strip at the horizontal center.
    if (rightEyeFlag) {
        double regionWidth = EyeRegion.rightOuterEyeCorner.x - EyeRegion.rightInnerEyeCorner.x;
        pureEyeRegion = new Rect((int) (EyeRegion.rightInnerEyeCorner.x + regionWidth / 2 - 2),
                (int) (Eye.rightRect.y), (4), Eye.rightRect.height);
        imwrite("strictEyeRegRight.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        // Setting x coordinates of eyelids (midpoint between the corners).
        EyeRegion.rightLowerEyelid.x = (EyeRegion.rightOuterEyeCorner.x + EyeRegion.rightInnerEyeCorner.x) / 2;
        EyeRegion.rightUpperEyelid.x = EyeRegion.rightLowerEyelid.x;
        // NOTE(review): openness is computed from the eyelid y-values as they
        // are BEFORE the scan below updates them - presumably from a previous
        // frame/call; confirm intended ordering.
        EyeRegion.rightEyeOpeness = (EyeRegion.rightUpperEyelid.y - EyeRegion.rightLowerEyelid.y);
    } else {
        double regionWidth;
        regionWidth = EyeRegion.leftInnerEyeCorner.x - EyeRegion.leftOuterEyeCorner.x;
        pureEyeRegion = new Rect((int) (regionWidth / 2 + EyeRegion.leftOuterEyeCorner.x - 2),
                (int) (Eye.leftRect.y), (4), Eye.leftRect.height);
        imwrite("leftEyeReg.jpg", new Mat(EyeRegion._face, pureEyeRegion));
        // Setting x coordinates of eyelids (midpoint between the corners).
        EyeRegion.leftLowerEyelid.x = (EyeRegion.leftInnerEyeCorner.x + EyeRegion.leftOuterEyeCorner.x) / 2;
        EyeRegion.leftUpperEyelid.x = EyeRegion.leftLowerEyelid.x;
        EyeRegion.leftEyeOpeness = (EyeRegion.leftUpperEyelid.y - EyeRegion.leftLowerEyelid.y);
    }
    Mat strictEyeRegion = new Mat(EyeRegion._face, pureEyeRegion);
    Mat result = new Mat();
    // Gamma-boost (x^1.27) to increase contrast before thresholding.
    strictEyeRegion.convertTo(strictEyeRegion, CvType.CV_32F);
    Core.pow(strictEyeRegion, 1.27, strictEyeRegion);
    cvtColor(strictEyeRegion, strictEyeRegion, Imgproc.COLOR_BGR2GRAY);
    imwrite("improved.jpg", strictEyeRegion);
    // Dark pixels (eye area) become 255 via the inverted threshold.
    threshold(strictEyeRegion, result, 100, 255, Imgproc.THRESH_BINARY_INV);
    // Dilate horizontally (3x1 cross, 3 iterations) to join broken runs.
    Mat strEl = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 1));
    dilate(result, result, strEl, new Point(1, 0), 3);
    // Blank the top 40% of the strip so eyebrow pixels cannot be mistaken
    // for the eyelid boundary.
    for (int i = 0; i < result.width(); i++) {
        for (int j = 0; j < result.height() * 0.4; j++) {
            result.put(j, i, new double[] { 0, 0, 0 });
        }
    }
    // Scan column 0 bottom-up: the first white pixel found fixes the lower
    // eyelid (once, +3 px margin); the last one found fixes the upper eyelid.
    for (int j = result.height() - 1; j >= 0; j--) {
        if (result.get(j, 0)[0] == 255) {
            if (rightEyeFlag) {
                if (EyeRegion.rightLowerEyelid.y == 0) {
                    EyeRegion.rightLowerEyelid.y = j + 3;
                    EyeRegion.rightLowerEyelid.y += Eye.rightRect.y;
                }
                EyeRegion.rightUpperEyelid.y = j;
                EyeRegion.rightUpperEyelid.y += Eye.rightRect.y;
            } else {
                if (EyeRegion.leftLowerEyelid.y == 0) {
                    EyeRegion.leftLowerEyelid.y = j + 3;
                    EyeRegion.leftLowerEyelid.y += Eye.leftRect.y;
                }
                EyeRegion.leftUpperEyelid.y = j;
                EyeRegion.leftUpperEyelid.y += Eye.leftRect.y;
            }
        }
    }
    imwrite("openessResult.jpg", result);
}
From source file:emotion.Eye.java
/**
 * Expands the input rectangle by 30% in each dimension (recentered
 * horizontally and shifted up by a quarter of the new height) and clamps
 * the result so it lies fully inside the canvas.
 *
 * @param _input rectangle to expand
 * @param canvas image the returned rectangle must fit inside
 * @return expanded rectangle, clipped to the canvas bounds
 */
private Rect recalculate(Rect _input, Mat canvas) {
    Rect output = new Rect();
    int width = (int) (_input.width * 1.3);
    int height = (int) (_input.height * 1.3);
    // Re-center horizontally; shift up so most of the growth is above.
    output.x = _input.x - (width - _input.width) / 2;
    output.y = _input.y - (height) / 4;
    // Clamp the origin into the canvas.
    if (output.x < 0) {
        output.x = 0;
    } else if (output.x >= canvas.width()) {
        output.x = canvas.width() - 1;
    }
    if (output.y < 0) {
        output.y = 0;
    } else if (output.y >= canvas.height()) {
        output.y = canvas.height() - 1;
    }
    // Bug fix: the original never clipped the far edges, so the returned
    // Rect could extend past the canvas and make a later
    // new Mat(canvas, rect) ROI construction throw.
    if (output.x + width > canvas.width()) {
        width = canvas.width() - output.x;
    }
    if (output.y + height > canvas.height()) {
        height = canvas.height() - output.y;
    }
    output.width = width;
    output.height = height;
    return output;
}
From source file:emotion.Eyebrow.java
public static void Harris(Mat img, boolean rightEyeFlag) { //Harris point extraction Mat harrisTestimg;// w w w . jav a 2 s.c om harrisTestimg = img.clone(); cvtColor(harrisTestimg, harrisTestimg, Imgproc.COLOR_BGR2GRAY); threshold(harrisTestimg, harrisTestimg, 200, 255, Imgproc.THRESH_BINARY_INV); Mat struct = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3)); erode(harrisTestimg, harrisTestimg, struct); dilate(harrisTestimg, harrisTestimg, struct); imwrite("intermediateHaaris.jpg", harrisTestimg); harrisTestimg.convertTo(harrisTestimg, CV_8UC1); ArrayList<MatOfPoint> contours = new ArrayList<>(); Mat hierarchy = new Mat(); Imgproc.findContours(harrisTestimg, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE); //System.out.println("Average Y for contours:"); float[] averageY = new float[contours.size()]; for (int i = 0; i < contours.size(); ++i) { //We calculate mean of Y coordinates for each contour for (int j = 0; j < contours.get(i).total(); ++j) { int val = (int) contours.get(i).toArray()[j].y; averageY[i] += val; } averageY[i] /= contours.get(i).total(); //System.out.println(i+") "+averageY[i]); if (averageY[i] <= img.height() / 2 && //We consider just up half of an image contours.get(i).total() >= img.width()) //and longer than threshold Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(255, 255, 255)); else Imgproc.drawContours(harrisTestimg, contours, i, new Scalar(0, 0, 0)); } MatOfPoint features = new MatOfPoint(); Imgproc.goodFeaturesToTrack(harrisTestimg, features, 100, 0.00001, 0); //We draw just 2 extreme points- first and last Point eyebrowsPoints[] = new Point[2]; for (int i = 0; i < features.toList().size(); i++) { if (i == 0) { eyebrowsPoints[0] = new Point(harrisTestimg.width() / 2, 0); eyebrowsPoints[1] = new Point(harrisTestimg.width() / 2, 0); } if (features.toArray()[i].x < eyebrowsPoints[0].x && features.toArray()[i].y < harrisTestimg.height() / 2) { eyebrowsPoints[0] = features.toArray()[i]; } if 
(features.toArray()[i].x > eyebrowsPoints[1].x && features.toArray()[i].y < harrisTestimg.height() / 2) { eyebrowsPoints[1] = features.toArray()[i]; } } StaticFunctions.drawCross(img, eyebrowsPoints[1], StaticFunctions.Features.EYEBROWS_ENDS); StaticFunctions.drawCross(img, eyebrowsPoints[0], StaticFunctions.Features.EYEBROWS_ENDS); imwrite("testHaris.jpg", img); if (rightEyeFlag) { EyeRegion.rightInnerEyebrowsCorner = eyebrowsPoints[0]; EyeRegion.rightInnerEyebrowsCorner.x += Eye.rightRect.x; EyeRegion.rightInnerEyebrowsCorner.y += Eye.rightRect.y; EyeRegion.rightOuterEyebrowsCorner = eyebrowsPoints[1]; EyeRegion.rightOuterEyebrowsCorner.x += Eye.rightRect.x; EyeRegion.rightOuterEyebrowsCorner.y += Eye.rightRect.y; } else { EyeRegion.leftInnerEyebrowsCorner = eyebrowsPoints[1]; EyeRegion.leftInnerEyebrowsCorner.x += Eye.leftRect.x; EyeRegion.leftInnerEyebrowsCorner.y += Eye.leftRect.y; EyeRegion.leftOuterEyebrowsCorner = eyebrowsPoints[0]; EyeRegion.leftOuterEyebrowsCorner.x += Eye.leftRect.x; EyeRegion.leftOuterEyebrowsCorner.y += Eye.leftRect.y; } }
From source file:emotion.EyeRegion.java
public static void areEyebrowsWrinkles() { //setting parameters int height = (int) (abs(rightInnerEyebrowsCorner.y - rightInnerEyeCorner.y) * 1.2); int width = (int) (rightInnerEyeCorner.x - leftInnerEyeCorner.x); int y = (int) (rightInnerEyebrowsCorner.y - height / 2); int x = (int) leftInnerEyebrowsCorner.x; Rect wrinklesRect = new Rect(x, y, width, height); Mat wrinklesArea = new Mat(_face, wrinklesRect).clone(); wrinklesThreshold = (int) (wrinklesArea.width() * wrinklesArea.height() * 0.085); //Wrinkles between eyebrows are vertical int[] gradientMask = new int[9]; gradientMask[0] = -1;/*from w w w . j ava2s . c o m*/ gradientMask[1] = 0; gradientMask[2] = 1; gradientMask[3] = -5; gradientMask[4] = 0; gradientMask[5] = 5; gradientMask[6] = -1; gradientMask[7] = 0; gradientMask[8] = 1; wrinklesArea.convertTo(wrinklesArea, CvType.CV_32F); Imgproc.cvtColor(wrinklesArea, wrinklesArea, Imgproc.COLOR_BGR2GRAY); Core.pow(wrinklesArea, 1.09, wrinklesArea); imwrite("wrinklesArea.jpg", wrinklesArea); wrinklesArea = StaticFunctions.convolution(gradientMask, wrinklesArea); threshold(wrinklesArea, wrinklesArea, 110, 255, Imgproc.THRESH_BINARY); imwrite("wrinklesAreaGradiented.jpg", wrinklesArea); long wrinklesPoints = 0; for (int i = 0; i < wrinklesArea.width(); i++) { for (int j = 0; j < wrinklesArea.height(); j++) { if (wrinklesArea.get(j, i)[0] == 255) { wrinklesPoints++; } } } EyeRegion.wrinklesFactor = wrinklesPoints; // System.out.println("Wrinkles factor: "+wrinklesPoints); if (wrinklesPoints >= wrinklesThreshold) { // System.out.println("Expression wrinkles detected! Threshold exceeded"); Imgproc.rectangle(EyeRegion._face, wrinklesRect.br(), wrinklesRect.tl(), new Scalar(0, 50, 205)); } }
From source file:emotion.StaticFunctions.java
public static Mat gabor(Mat image) { Mat img = image.clone(); double ksize = 15; double sigme = 4; double gamma = 1; double psi = 50; int lambd[] = new int[] { 5, 6, 7, 10/*,15,13,2*/ }; double theta[] = new double[] { 180, 200 }; ArrayList<Mat> kernels = new ArrayList<>(); for (int i = 0; i < theta.length; i++) { for (int j = 0; j < lambd.length; j++) { kernels.add(Imgproc.getGaborKernel(new Size(ksize, ksize), sigme, theta[i], lambd[j], gamma, psi, CvType.CV_32F));/*from w w w . j a v a 2 s. co m*/ } } Mat result = new Mat(img.height(), img.width(), img.type(), new Scalar(0, 0, 0)); for (Mat kernel : kernels) { Mat temp = new Mat(img.height(), img.width(), img.type(), new Scalar(0, 0, 0)); Imgproc.filter2D(img, temp, -1, kernel); Core.add(result, temp, result); } //imwrite("gaborResult.jpg",result); return result; }
From source file:es.ugr.osgiliart.features.opencv.Histogram.java
License:Open Source License
@Override public double[] extract(Mat image) { Mat hsvImage = new Mat(image.width(), image.height(), image.type()); Mat histHue = new Mat(); Mat histSaturation = new Mat(); Imgproc.cvtColor(image, hsvImage, Imgproc.COLOR_BGR2HSV); List<Mat> channels = new ArrayList<Mat>(); Core.split(hsvImage, channels);//www . j a v a2 s . co m //Histogram for hue Imgproc.calcHist(Arrays.asList(new Mat[] { channels.get(0) }), new MatOfInt(0), new Mat(), histHue, new MatOfInt(BINS), new MatOfFloat(MIN_VALUE, MAX_VALUE)); //Histogram for saturation Imgproc.calcHist(Arrays.asList(new Mat[] { channels.get(1) }), new MatOfInt(0), new Mat(), histSaturation, new MatOfInt(BINS), new MatOfFloat(MIN_VALUE, MAX_VALUE)); double sum = Core.sumElems(histHue).val[0]; double[] values = new double[histHue.height() + histSaturation.height()]; int k = 0; for (int i = 0; i < histHue.height(); ++i) { values[k++] = histHue.get(i, 0)[0] / sum; } sum = Core.sumElems(histSaturation).val[0]; for (int i = 0; i < histSaturation.height(); ++i) { values[k++] = histSaturation.get(i, 0)[0] / sum; } return values; }
From source file:fi.conf.tabare.ARDataProvider.java
/**
 * Main capture/detection loop: reads frames from inputVideo while
 * {@code running} is true, preprocesses each frame (grayscale, contrast/
 * brightness, blur, optional undistortion), runs optional blob tracking and
 * tripcode circle detection, and pushes a preview image plus FPS timing to
 * camPreviewPanel. Releases all Mats and the capture device on exit.
 */
private void detect() {
    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null; // built lazily on the first undistorted frame
    //List<Mat> channels = new LinkedList<>();
    // Main processing loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                // preview_image stays null unless the selected view below
                // matches; only then is a clone pushed to the UI.
                Mat preview_image = null;
                if (selectedView == View.calib)
                    preview_image = input_image.clone();
                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);
                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);
                //Imgproc.equalizeHist(input_image, input_image);
                // image*contrast[1.0-3.0] + brightness[0-255]
                input_image.convertTo(input_image, -1, params.contrast, params.brightness);
                doBlur(input_image, input_image, params.blur, params.blurAmount);
                if (selectedView == View.raw)
                    preview_image = input_image.clone();
                if (params.enableDistortion) {
                    // Build the camera matrix once for the current frame size.
                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);
                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);
                    // NOTE(review): undistorted_image is initialized to new Mat()
                    // above, so this null check is dead code; the allocation
                    // (which also appears to pass width before height, unlike
                    // Mat's rows-first constructor) never runs - confirm intent.
                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);
                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);
                    input_image = undistorted_image.clone();
                    if (selectedView == View.dist)
                        preview_image = input_image.clone();
                }

                // Disabled experiment: dynamic background subtraction.
                // if(background == null) background = input_image.clone();
                // if(recaptureBg){
                // backgSubstractor.apply(background, background);
                // System.out.println(background.channels() + " " + background.size() );
                // System.out.println(input_image.channels() + " " + input_image.size() );
                // recaptureBg = false;
                // }
                // if(dynamicBGRemoval){
                // //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                // //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                // //Core.subtract(input_image, background, input_image);
                // //Core.bitwise_xor(input_image, background, input_image);
                // // doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get nicer result when substracting
                // backgSubstractor.apply(background, background, dynamicBGAmount);
                // }
                // if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    // Threshold + optional morphology, then simple-blob detect.
                    Mat blobs_image = input_image.clone();
                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));
                    Size kernelSize = null;
                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }
                    int kernelType = -1;
                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }
                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }
                    // Lazily create the blob detector on first use.
                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);
                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();
                    blobs_image.release();
                }

                if (params.tripTracking) {
                    // Tripcode pass works on the undistorted frame.
                    Mat trips_image = undistorted_image.clone();
                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));
                        }
                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }
                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);
                    // Each column of 'circles' holds one (x, y, r) candidate.
                    for (int i = 0; i < circles.cols(); i++) {
                        double[] coords = circles.get(0, i);
                        // If the circle is off the limits, or too small, don't process it.
                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue;
                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);
                        if (tc.isValid())
                            tripcodeData.add(tc);
                    }
                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();
                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }
            } else {
                System.out.println("frame/cam failiure!");
            }
        } catch (Exception e) {
            // Any failure stops the loop; the Mats are still released below.
            e.printStackTrace();
            running = false;
        }

        // FPS calculations: time between successive loop iterations.
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }
    }

    // De-init: release native Mat memory and the capture device.
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}