List of usage examples for the org.opencv.core.Mat default constructor
public Mat()
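Before the examples, a minimal sketch of what the no-argument constructor gives you: an empty header with no rows, columns, or data, which OpenCV functions later size and fill. The class name is made up for illustration; the only assumption is that the OpenCV 3.x Java bindings and native library are on the path.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatDefaultCtorSketch {
    public static void main(String[] args) {
        // The native library must be loaded before any Mat is created
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat empty = new Mat();
        // new Mat() allocates a header only: no rows, no cols, no pixel data
        System.out.println("empty=" + empty.empty() + " size=" + empty.size());

        // Functions that write into a Mat allocate its data on demand
        Mat src = Mat.eye(3, 3, CvType.CV_8UC1);
        Mat dst = new Mat();
        src.convertTo(dst, CvType.CV_32F);
        System.out.println("dst=" + dst.rows() + "x" + dst.cols());
    }
}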
From source file:com.mycompany.objectdetection.ObjectDetector.java
public ObjectDetector(String fileName) {
    img = Imgcodecs.imread(fileName);
    imgOut = Imgcodecs.imread(fileName);
    contours = new ArrayList();
    imgMeanShifted = new Mat();
    imgGrayscale = new Mat();
    imgCanny = new Mat();
    objList = new ArrayList();
    mainObjects = new ArrayList();
    mainRect = null;
    mRgba = new Mat();
}
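The constructor above only pre-creates empty Mats (imgMeanShifted, imgGrayscale, imgCanny); later pipeline stages size and fill them. A hedged sketch of such a stage, with hypothetical Canny thresholds:

// Sketch of a later pipeline step filling the pre-created Mats.
// The 50/150 thresholds are hypothetical, not from the original class.
Imgproc.cvtColor(img, imgGrayscale, Imgproc.COLOR_BGR2GRAY);
Imgproc.Canny(imgGrayscale, imgCanny, 50, 150);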
From source file:com.oetermann.imageclassifier.DescriptorExtractorWrapper.java
License:Open Source License
public Mat detectAndCompute(Mat image) {
    MatOfKeyPoint keypoint = new MatOfKeyPoint();
    featureDetector.detect(image, keypoint);
    Mat descriptor = new Mat();
    descriptorExtractor.compute(image, keypoint, descriptor);
    keypoint.release();
    return descriptor;
}
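Descriptors produced this way are typically compared with a matcher in a classifier like this one; a sketch, assuming OpenCV 3.x imports and two descriptor Mats returned by detectAndCompute (queryDescriptors and trainDescriptors are placeholder names; BRUTEFORCE_HAMMING suits binary descriptors such as ORB):

// Sketch: comparing two descriptor Mats returned by detectAndCompute.
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
MatOfDMatch matches = new MatOfDMatch();
matcher.match(queryDescriptors, trainDescriptors, matches);
for (DMatch m : matches.toArray()) {
    // A smaller distance means a closer descriptor pair
    System.out.println("queryIdx=" + m.queryIdx + " distance=" + m.distance);
}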
From source file:com.opencv.video.VideoCaptureMain.java
private void jButtonPlayActionPerformed(java.awt.event.ActionEvent evt) { //GEN-FIRST:event_jButtonPlayActionPerformed
    Thread t = new Thread() {
        @Override
        public void run() {
            MatToBufImg matToBufferedImageConverter = new MatToBufImg();
            try {
                final VideoCapture videoCapture = new VideoCapture("D:\\colorTest.mp4");
                // videoCapture = new VideoCapture(0);
                // Thread.sleep(3000);
                if (!videoCapture.isOpened()) {
                    System.out.println("Cannot open video");
                    return;
                }
                double fps = videoCapture.get(5); // property 5 is CV_CAP_PROP_FPS
                System.out.println("FPS: " + fps);
                frame = new Mat();
                Mat hsv_image = new Mat();
                Mat thresholded = new Mat();
                while (true) {
                    boolean basarili = videoCapture.read(frame);
                    if (!basarili) {
                        System.out.println("Cannot read frame");
                        break;
                    }
                    Imgproc.cvtColor(frame, hsv_image, Imgproc.COLOR_BGR2HSV);
                    Core.inRange(hsv_image, new Scalar(170, 150, 60), new Scalar(179, 255, 255), thresholded);
                    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                    Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                            Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                    for (int i = 0; i < contours.size(); i++) {
                        // System.out.println(Imgproc.contourArea(contours.get(i)));
                        // if (Imgproc.contourArea(contours.get(i)) > 1) {
                        Rect rect = Imgproc.boundingRect(contours.get(i));
                        kesit = frame.submat(rect);
                        // System.out.println(rect.height);
                        // if (rect.height > 20 && rect.height < 30 && rect.width < 30 && rect.width > 20) {
                        //     System.out.println(rect.x + "," + rect.y + "," + rect.height + "," + rect.width);
                        Core.rectangle(frame, new Point(rect.x, rect.y),
                                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 0, 255));
                        // Core.circle(webcam_image, new Point(rect.x + rect.height / 2, rect.y + rect.width / 2), i, new Scalar(0, 0, 255));
                        // }
                        // }
                    }
                    matToBufferedImageConverter.setMatrix(frame, ".jpg");
                    g.drawImage(matToBufferedImageConverter.getBufferedImage(), 0, 0, 640, 480, null);
                }
            } catch (Exception e) {
                System.out.println("Problem here");
            }
        }
    };
    t.start();
}
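Note how the loop above allocates frame, hsv_image and thresholded once and lets read() and cvtColor() refill the same buffers on every iteration. A minimal sketch of that reuse pattern with explicit cleanup; the file path is a placeholder:

VideoCapture cap = new VideoCapture("video.mp4"); // placeholder path
Mat frame = new Mat(); // allocated once, overwritten by each read()
try {
    while (cap.isOpened() && cap.read(frame)) {
        // process frame here; no new Mat per iteration
    }
} finally {
    frame.release(); // free the native buffer deterministically
    cap.release();
}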
From source file:com.orange.documentare.core.image.Binarization.java
License:Open Source License
private static Mat getGreyscaleImage(Mat image) {
    Mat greyImage = new Mat();
    Imgproc.cvtColor(image, greyImage, Imgproc.COLOR_RGB2GRAY);
    return greyImage;
}
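One detail worth noting: Imgcodecs.imread returns images in BGR channel order, so callers converting freshly loaded files usually pass COLOR_BGR2GRAY; COLOR_RGB2GRAY as above implies the input is already RGB. A sketch of the load-then-convert variant, with a placeholder file name:

Mat colour = Imgcodecs.imread("page.png"); // imread yields BGR channel order
Mat grey = new Mat();                      // empty header; cvtColor sizes and fills it
Imgproc.cvtColor(colour, grey, Imgproc.COLOR_BGR2GRAY);
System.out.println("channels: " + grey.channels()); // 1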
From source file:com.orange.documentare.core.image.connectedcomponents.ConnectedComponentsDetector.java
License:Open Source License
/**
 * Retrieve connected components in image
 * @param imageMat
 * @param filter to apply to remove some connected components (based on size, etc)
 * @return connected components
 */
private ConnectedComponents detect(Mat imageMat, ConnectedComponentsFilter filter) {
    Mat binaryMat = Binarization.getFrom(imageMat);
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(binaryMat, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    return buildConnectedComponents(contours, filter);
}
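buildConnectedComponents is not shown here; a hedged sketch of the step it implies, turning each contour into a bounding box (the size filter is hypothetical, not from the original class):

// Hypothetical sketch: one bounding box per contour, filtered by size.
List<Rect> boxes = new ArrayList<>();
for (MatOfPoint contour : contours) {
    Rect box = Imgproc.boundingRect(contour);
    if (box.area() > 10) { // hypothetical minimum-size filter
        boxes.add(box);
    }
}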
From source file:com.projecttango.examples.java.pointcloud.MainActivity.java
License:Open Source License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_point_cloud);

    // IOIO
    pwm_speed = 1500;
    pwm_steering = 1500;

    mDetector = new ColorBlobDetector();
    mSpectrum = new Mat();
    mBlobColorRgba = new Scalar(255);
    CONTOUR_COLOR = new Scalar(255, 0, 0, 255);
    // To set color, find HSV values of desired color and convert each value to 1-255 scale
    // mDetector.setHsvColor(new Scalar(7, 196, 144)); // red
    // mDetector.setHsvColor(new Scalar(253.796875, 222.6875, 195.21875));
    mDetector.setHsvColor(new Scalar(7.015625, 255.0, 239.3125)); // bucket orange

    mSurfaceView = (RajawaliSurfaceView) findViewById(R.id.gl_surface_view);

    textToSpeech = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int i) {
            textToSpeech.setLanguage(Locale.US);
        }
    });

    mPointCloudManager = new TangoPointCloudManager();
    mTangoUx = setupTangoUxAndLayout();
    mRenderer = new Scene(this);
    setupRenderer();

    // Set as top-down view
    mRenderer.setTopDownView();
    mRenderer.renderVirtualObjects(true);

    tangoCameraPreview = (TangoTextureCameraPreview) findViewById(R.id.cameraPreview);

    mapInfo = new MapInfo();
    primaryColor = Color.parseColor("#FF3F51B5");
    primaryDark = Color.parseColor("#FF303F9F");
    mapInfo.setGrid(new int[GRID_SIZE][GRID_SIZE]);
    mapInfo.setCurrentCell(1, 3, 4);

    Window window = this.getWindow();
    window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
    window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
    // Will error in setStatusBarColor due to MIN API level at 19
    window.setStatusBarColor(primaryDark);

    final Toolbar mToolBar = (Toolbar) findViewById(R.id.mainToolBar);
    // setSupportActionBar(mToolBar);
    // getSupportActionBar().setDisplayShowTitleEnabled(false);
    mToolBar.setTitleTextColor(Color.WHITE);
    mToolBar.setBackgroundColor(primaryColor);
    mToolBar.setTitle("");

    Button startPointButton = (Button) findViewById(R.id.setStartPoint);
    startPointButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            // Do something in response to button click
            mRenderer.setStartPoint(getCurrentPose());
            textToSpeech.speak("Start Point Set", TextToSpeech.QUEUE_FLUSH, null);
            Log.d("StartPoint", "Startpoint Set at: " + getCurrentPose());
        }
    });

    Button endPointButton = (Button) findViewById(R.id.setEndPoint);
    endPointButton.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            // Do something in response to button click
            mRenderer.setEndPoint(getCurrentPose());
            Log.d("EndPoint", "Endpoint Set at: " + getCurrentPose());
        }
    });

    ToggleButton toggle = (ToggleButton) findViewById(R.id.togglePointCloud);
    toggle.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            if (isChecked) {
                mRenderer.setThirdPersonView();
                mRenderer.drawLineBtwnBuckets();
            } else {
                mRenderer.setTopDownView();
                mRenderer.removeLineBtwnBuckets();
            }
        }
    });

    ToggleButton toggleMotors = (ToggleButton) findViewById(R.id.toggleMotors);
    toggleMotors.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            if (isChecked) {
                toast("Speed" + ": " + get_speed() + "Steer" + ": " + get_steering());
                set_speed(1500 + 800);
                set_steering(1500);
            } else {
                toast("Speed" + ": " + get_speed() + "Steer" + ": " + get_steering());
                set_speed(1500);
                set_steering(1500);
            }
        }
    });

    mMotorbar = (SeekBar) findViewById(R.id.motorBar); // make seekbar object
    mMotorbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
        @Override
        public void onStopTrackingTouch(SeekBar seekBar) {
            toast("MotorVal: " + motorSliderVal);
            set_speed(1500 + motorSliderVal);
            set_steering(1500);
        }

        @Override
        public void onStartTrackingTouch(SeekBar seekBar) {
        }

        @Override
        public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
            motorSliderVal = progress;
        }
    });

    DisplayManager displayManager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
    if (displayManager != null) {
        displayManager.registerDisplayListener(new DisplayManager.DisplayListener() {
            @Override
            public void onDisplayAdded(int displayId) {
            }

            @Override
            public void onDisplayChanged(int displayId) {
                synchronized (this) {
                    setDisplayRotation();
                    mMapView.setFloorPlanData(mRenderer.getFloorPlanData());
                    setAndroidOrientation();
                }
            }

            @Override
            public void onDisplayRemoved(int displayId) {
            }
        }, null);

        if (hasPermission()) {
            if (null == savedInstanceState) {
                // Instantiates the TensorFlow view
                // setFragment();
            }
        } else {
            requestPermission();
        }
    }
}
From source file:com.randhirkumar.webcam.MainFrameForm.java
public void displayScreen() {
    Mat webcamImage = new Mat();
    VideoCapture videoCapture = new VideoCapture(0);
    if (videoCapture.isOpened()) {
        while (true) {
            videoCapture.read(webcamImage);
            if (!webcamImage.empty()) {
                setSize(webcamImage.width() + 50, webcamImage.height() + 70);
                webcamImage = processor.detect(webcamImage);
                cameraPanel.convertMatToImage(webcamImage);
                cameraPanel.repaint();
            } else {
                System.out.println("Problem");
                break;
            }
        }
    }
}
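displayScreen never returns, so calling it from the Swing event dispatch thread would freeze the UI; a sketch of launching it on a worker thread (form is a hypothetical reference to the MainFrameForm instance):

// Keep the blocking capture loop off the event dispatch thread
new Thread(() -> form.displayScreen(), "webcam-loop").start();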
From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java
License:Apache License
/**
 * <p>Builds a template from a specific eye area previously subtracted.
 * Uses detectMultiScale for this area, then uses the minMaxLoc method to
 * detect the iris within the detected eye.</p>
 *
 * @param area preformatted area
 * @param size minimum iris size
 * @param grayMat image in gray
 * @param rgbaMat image in color
 * @param detectorEye Haar cascade classifier
 * @return built template
 */
@NonNull
private static Mat buildTemplate(Rect area, final int size, @NonNull Mat grayMat, @NonNull Mat rgbaMat,
        CascadeClassifier detectorEye) {
    Mat template = new Mat();
    Mat graySubMatEye = grayMat.submat(area);
    MatOfRect eyes = new MatOfRect();
    Rect eyeTemplate;
    detectorEye.detectMultiScale(graySubMatEye, eyes, 1.15, 2,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
            new Size(EYE_MIN_SIZE, EYE_MIN_SIZE), new Size());
    Rect[] eyesArray = eyes.toArray();
    if (eyesArray.length > 0) {
        Rect e = eyesArray[0];
        e.x = area.x + e.x;
        e.y = area.y + e.y;
        Rect eyeRectangle = getEyeArea((int) e.tl().x, (int) (e.tl().y + e.height * 0.4),
                e.width, (int) (e.height * 0.6));
        graySubMatEye = grayMat.submat(eyeRectangle);
        Mat rgbaMatEye = rgbaMat.submat(eyeRectangle);
        Core.MinMaxLocResult minMaxLoc = Core.minMaxLoc(graySubMatEye);
        FaceDrawerOpenCV.drawIrisCircle(rgbaMatEye, minMaxLoc);
        Point iris = new Point();
        iris.x = minMaxLoc.minLoc.x + eyeRectangle.x;
        iris.y = minMaxLoc.minLoc.y + eyeRectangle.y;
        eyeTemplate = getEyeArea((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
        FaceDrawerOpenCV.drawEyeRectangle(eyeTemplate, rgbaMat);
        template = (grayMat.submat(eyeTemplate)).clone();
    }
    return template;
}
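The returned template is presumably matched against later frames elsewhere in the interactor; a hypothetical sketch of that follow-up using matchTemplate (grayFrame stands for a newer gray image and is not from the original class):

// Hypothetical follow-up: locate the stored template in a newer gray frame.
Mat result = new Mat();
Imgproc.matchTemplate(grayFrame, template, result, Imgproc.TM_SQDIFF_NORMED);
Core.MinMaxLocResult mm = Core.minMaxLoc(result);
Point bestMatch = mm.minLoc; // with TM_SQDIFF_NORMED the minimum is the best match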
From source file:com.raulh82vlc.face_detection_sample.opencv.presentation.FDOpenCVPresenter.java
License:Apache License
@Override
public void onCameraViewStarted(int width, int height) {
    matrixGray = new Mat();
    matrixRgba = new Mat();
    isStopped = false;
    eyesDetectionInteractor.setRunningStatus(true);
}
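The OpenCV camera listener pairs this callback with onCameraViewStopped; a sketch of the matching cleanup, assuming the same fields:

@Override
public void onCameraViewStopped() {
    // Release the native buffers allocated in onCameraViewStarted
    matrixGray.release();
    matrixRgba.release();
}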
From source file:com.seleniumtests.util.imaging.ImageDetector.java
License:Apache License
/**
 * Computes the rectangle where the searched picture is, and the rotation angle between both images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for information, but OpenCV 3 does not include SURF anymore in the Java build.
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }
    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }
    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();
    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);
    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();
    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();
    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);
    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);
    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });
    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // corners of the object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));
    // corners of the object in the scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);
            showResultingPicture(imgMatch);
        } catch (IOException e) {
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework the scene points, as we are now sure the object rotation is 0, 90, 180 or 270 degrees
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that the aspect ratios of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}
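As the comment in the code notes, Calib3d.RANSAC can replace the 0 (plain least-squares) method when computing the homography; a one-line sketch of that variant:

// RANSAC tolerates outlier matches; 5 is the reprojection error threshold in pixels
Mat hg = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 5);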