List of usage examples for org.opencv.core.Mat.eye
public static Mat eye(int rows, int cols, int type)
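Mat.eye creates a new rows x cols identity matrix of the given type: ones on the main diagonal, zeros everywhere else. Several of the examples below call it immediately after System.loadLibrary(Core.NATIVE_LIBRARY_NAME) simply as a smoke test that the native OpenCV library loaded correctly. A minimal, self-contained sketch (the class name MatEyeExample is ours; the OpenCV calls are the standard Java bindings used throughout the examples below):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatEyeExample {
    public static void main(String[] args) {
        // The native library must be loaded before any Mat operation.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 3x3 single-channel 8-bit identity matrix.
        Mat eye = Mat.eye(3, 3, CvType.CV_8UC1);
        System.out.println("eye = " + eye.dump());
        // Prints, roughly:
        // eye = [  1,   0,   0;
        //    0,   1,   0;
        //    0,   0,   1]
    }
}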
From source file:de.vion.eyetracking.cameracalib.calibration.opencv.CameraCalibrator.java
public CameraCalibrator(int width, int height) {
    this.mImageSize = new Size(width, height);
    this.mFlags = Calib3d.CALIB_FIX_PRINCIPAL_POINT + Calib3d.CALIB_ZERO_TANGENT_DIST
            + Calib3d.CALIB_FIX_ASPECT_RATIO + Calib3d.CALIB_FIX_K4 + Calib3d.CALIB_FIX_K5;

    // Start from an identity camera matrix and zero distortion coefficients;
    // the calibration routine refines these later.
    Mat.eye(3, 3, CvType.CV_64FC1).copyTo(this.mCameraMatrix);
    this.mCameraMatrix.put(0, 0, 1.0);
    Mat.zeros(5, 1, CvType.CV_64FC1).copyTo(this.mDistortionCoefficients);
    Log.i(TAG, "Instantiated new " + this.getClass());
}
From source file:edu.soict.hust.k57.mmdb.components.HistogramCaculator.java
@Override
public void accept(ImgEnt t) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1); // sanity check that the native library is loaded
    Mat m = Imgcodecs.imread(t.getF().getPath());
    List<Mat> images = new ArrayList<Mat>();
    Core.split(m, images);

    MatOfInt histSize = new MatOfInt(t.getBin()); // size of the histogram (number of bins)
    MatOfInt channels = new MatOfInt(0); // color channel to compute
    MatOfFloat histRange = new MatOfFloat(0, 256);

    Mat bHist = new Mat();
    Mat gHist = new Mat();
    Mat rHist = new Mat();
    Imgproc.calcHist(images.subList(0, 1), channels, new Mat(), bHist, histSize, histRange, false);
    Core.normalize(bHist, bHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    Imgproc.calcHist(images.subList(1, 2), channels, new Mat(), gHist, histSize, histRange, false);
    Core.normalize(gHist, gHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    Imgproc.calcHist(images.subList(2, 3), channels, new Mat(), rHist, histSize, histRange, false);
    Core.normalize(rHist, rHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    t.setbHistogram(bHist);
    t.setgHistogram(gHist);
    t.setrHistogram(rHist);
}
From source file:fi.conf.tabare.ARDataProvider.java
private void detect() {
    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;
    //List<Mat> channels = new LinkedList<>();

    // Main loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);
                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);
                //Imgproc.equalizeHist(input_image, input_image);

                // image * contrast [1.0-3.0] + brightness [0-255]
                input_image.convertTo(input_image, -1, params.contrast, params.brightness);
                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {
                    // Mat.eye supplies the identity as the initial camera matrix.
                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);
                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();
                }

                // if (background == null) background = input_image.clone();
                // if (recaptureBg) {
                //     backgSubstractor.apply(background, background);
                //     System.out.println(background.channels() + " " + background.size());
                //     System.out.println(input_image.channels() + " " + input_image.size());
                //     recaptureBg = false;
                // }
                // if (dynamicBGRemoval) {
                //     //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //     //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //     //Core.subtract(input_image, background, input_image);
                //     //Core.bitwise_xor(input_image, background, input_image);
                //
                //     // Blur a little, to get a nicer result when subtracting
                //     doBlur(input_image, background, Blur.normal_7x7, 0);
                //     backgSubstractor.apply(background, background, dynamicBGAmount);
                // }
                // if (background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;
                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;
                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {
                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {
                        double[] coords = circles.get(0, i);

                        // If the circle is off the limits, or too small, don't process it.
                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue;

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);
                        if (tc.isValid())
                            tripcodeData.add(tc);
                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();

                    trips_image.release();
                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }
            } else {
                System.out.println("frame/cam failure!");
            }
        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        // FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }
    }

    // De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}
From source file:gov.nasa.jpl.memex.pooledtimeseries.healthcheck.CheckOpenCV.java
License:Apache License
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    String filename = args[0];
    System.out.println("opening video file " + filename);
    VideoCapture capture = new VideoCapture(filename);
    if (!capture.isOpened()) {
        System.out.println("video file " + filename + " could not be opened.");
    }
}
From source file:javacv.JavaCV.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());

    CascadeClassifier faceDetector = new CascadeClassifier("./data/lbpcascade_frontalface.xml");

    JFrame frame = new JFrame("BasicPanel");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(400, 400);
    JavaCV panel = new JavaCV();
    frame.setContentPane(panel);
    frame.setVisible(true);

    Mat webcam_image = new Mat();
    BufferedImage temp;
    VideoCapture capture = new VideoCapture(0);

    if (capture.isOpened()) {
        while (true) {
            capture.read(webcam_image);
            if (!webcam_image.empty()) {
                frame.setSize(webcam_image.width() + 40, webcam_image.height() + 60);

                MatOfRect faceDetections = new MatOfRect();
                faceDetector.detectMultiScale(webcam_image, faceDetections);

                // Draw a bounding box around each face.
                for (Rect rect : faceDetections.toArray()) {
                    Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                            new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
                }

                temp = matToBufferedImage(webcam_image);
                panel.setimage(temp);
                panel.repaint();
            } else {
                System.out.println(" --(!) No captured frame -- Break!");
                break;
            }
        }
    }
}
From source file:neuroimagingdataportal.EdgedetectProcessing.java
public String process(int value1, int value2) {
    this.threshold1 = value1;
    this.threshold2 = value2;
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("m = " + m.dump());

    // Optionally guard against a missing image:
    // if ("null".equals(this.url)) {
    //     JOptionPane.showMessageDialog(null, "Please select image to process",
    //             "WARNING_MESSAGE", JOptionPane.WARNING_MESSAGE);
    //     return null;
    // }

    Mat src1 = imread(url, CV_LOAD_IMAGE_COLOR);
    System.out.println(" read the file " + url);

    Mat gray = new Mat();
    cvtColor(src1, gray, COLOR_BGR2GRAY);
    Mat edge = new Mat();
    Mat draw = new Mat();

    /*
     * void Canny(InputArray image, OutputArray edges, double threshold1, double threshold2,
     *            int apertureSize=3, bool L2gradient=false)
     *
     * Parameters:
     *   image        single-channel 8-bit input image.
     *   edges        output edge map; it has the same size and type as image.
     *   threshold1   first threshold for the hysteresis procedure.
     *   threshold2   second threshold for the hysteresis procedure.
     *   apertureSize aperture size for the Sobel() operator.
     *   L2gradient   a flag indicating whether a more accurate L2 norm should be used.
     */
    Canny(gray, edge, threshold1, threshold2, 3, false);
    edge.convertTo(draw, CV_8U);

    // Write the edge map out as an image file.
    saveUrl = currentDirectory + "\\" + count + "converted.jpg";
    boolean written = imwrite(saveUrl, draw);
    System.out.println(" wrote the image to the given file path");
    return saveUrl;
}
From source file:opencvdemos.OpenCVDemos.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat mat = Mat.eye(3, 3, CvType.CV_8UC1);
    System.out.println("mat = " + mat.dump());
}
From source file:opencvtuto1.Opencvtuto1.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    System.out.println("hello world, OpenCV version " + Core.VERSION);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Mat.eye(8, 8, CvType.CV_8UC1);
    System.out.println("m = " + m.dump());
}
From source file:org.openpnp.machine.reference.ReferenceCamera.java
License:Open Source License
private Mat undistort(Mat mat) {
    if (!calibration.isEnabled()) {
        return mat;
    }

    if (undistortionMap1 == null || undistortionMap2 == null) {
        undistortionMap1 = new Mat();
        undistortionMap2 = new Mat();

        // Identity rectification transform: appropriate for a single (monocular) camera.
        Mat rectification = Mat.eye(3, 3, CvType.CV_32F);
        Imgproc.initUndistortRectifyMap(calibration.getCameraMatrixMat(),
                calibration.getDistortionCoefficientsMat(), rectification,
                calibration.getCameraMatrixMat(), mat.size(), CvType.CV_32FC1,
                undistortionMap1, undistortionMap2);
        rectification.release();
    }

    Mat dst = mat.clone();
    Imgproc.remap(mat, dst, undistortionMap1, undistortionMap2, Imgproc.INTER_LINEAR);
    mat.release();
    return dst;
}
From source file:org.usfirst.frc.team2084.CMonster2016.vision.CameraCalibration.java
License:Open Source License
/**
 * Calibrates the camera. This goes through all the corners in the list and
 * calibrates based on them.
 *
 * @return the reprojection error
 */
public double calibrate() {
    // Start from an identity camera matrix and zero distortion coefficients.
    cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
    distCoeffs = new MatOfDouble(Mat.zeros(8, 1, CvType.CV_64F));
    List<Mat> rvecs = new LinkedList<>();
    List<Mat> tvecs = new LinkedList<>();

    // Set the fixed aspect ratio.
    cameraMatrix.put(0, 0, aspectRatio);

    List<Mat> objectPoints = Collections.nCopies(calibrationCorners.size(), calcBoardCornerPositions());
    System.out.println(cameraMatrix);

    return error = Calib3d.calibrateCamera(objectPoints, calibrationCorners, HighGoalProcessor.IMAGE_SIZE,
            cameraMatrix, distCoeffs, rvecs, tvecs, Calib3d.CALIB_FIX_PRINCIPAL_POINT);
}
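Taken together, the examples show two recurring uses of Mat.eye beyond the load-library smoke test: a 64-bit identity (CvType.CV_64F or CV_64FC1) as the neutral initial intrinsic matrix handed to Calib3d.calibrateCamera or Imgproc.getDefaultNewCameraMatrix, and a 32-bit identity (CvType.CV_32F) as the rectification transform for Imgproc.initUndistortRectifyMap when only a single camera is involved.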