List of usage examples for the org.opencv.core.Mat constructor
public Mat(Mat m, Range rowRange, Range colRange)
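A minimal sketch of the signature above: the new Mat is a header that shares pixel data with m, restricted to the given row and column ranges (no copy is made). Note that most of the examples below use the Mat(int rows, int cols, int type) overload instead.

    Mat m = Mat.eye(8, 8, CvType.CV_8UC1);
    // View of rows 0..3 and columns 2..5; writes through 'view' modify 'm'.
    Mat view = new Mat(m, new Range(0, 4), new Range(2, 6));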
From source file:com.Linguist.model.grayscaleClass.java
@Override
public File imagePreprocessing(String image, String extnsn) {
    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;
    try {
        // loadOpenCV_Lib();
        // String path = "opencv\\build\\java\\x64\\opencv_java300.dll";
        FileInputStream fileName = new FileInputStream(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + image);
        InputStream input = fileName;
        bImge = ImageIO.read(input);
        // Wrap the decoded pixels in a 3-channel Mat; this cast assumes the
        // decoder produced a byte-backed raster (e.g. TYPE_3BYTE_BGR).
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) mat2.elemSize()];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);
        String extn = null;
        switch (extnsn) {
        case ".jpg":
            extn = "jpg";
            break;
        case ".png":
            extn = "png";
            break;
        case ".pdf":
            extn = "pdf"; // note: stock ImageIO has no PDF writer
            break;
        case ".tiff":
            extn = "tif";
            break;
        }
        // Write the grayscale image to the upload folder in the source format.
        grayscle = new File(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\grayscale" + "." + extn);
        ImageIO.write(bImage2, extn, grayscle);
    } catch (IOException ex) {
        System.out.println("" + ex.getMessage());
    } catch (Exception ex) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex);
    }
    return grayscle;
}
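The DataBufferByte cast above fails for BufferedImage types that are not byte-backed (for example TYPE_INT_RGB). A defensive variant, as a sketch: redraw the decoded image into a known 3-byte BGR buffer first.

    // Coerce any BufferedImage into TYPE_3BYTE_BGR so the raster is byte-backed.
    BufferedImage bgr = new BufferedImage(bImge.getWidth(), bImge.getHeight(),
            BufferedImage.TYPE_3BYTE_BGR);
    bgr.getGraphics().drawImage(bImge, 0, 0, null);
    byte[] pixels = ((DataBufferByte) bgr.getRaster().getDataBuffer()).getData();
    Mat mat = new Mat(bgr.getHeight(), bgr.getWidth(), CvType.CV_8UC3);
    mat.put(0, 0, pixels);
    // The data is now BGR, so the matching conversion is COLOR_BGR2GRAY.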
From source file:com.Linguist.model.sharpeningClass.java
public File imagePreprocessing(String imgeNme, String extnsn) {
    File sharpen = null;
    try {
        // System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat source = Imgcodecs.imread(
                "C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\" + imgeNme,
                Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        // Despite the class name, this applies global histogram equalization
        // (contrast enhancement), not a sharpening kernel.
        Imgproc.equalizeHist(source, destination);
        Imgcodecs.imwrite("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg",
                destination);
        sharpen = new File("C:\\Users\\User\\Documents\\GitHub\\Linguist\\web\\uploadedImage\\contrast.jpg");
    } catch (Exception e) {
        System.out.println("error: " + e.getMessage());
    }
    return sharpen;
}
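equalizeHist requires an 8-bit single-channel input and equalizes the whole image at once. Where local contrast matters more, CLAHE is a common alternative; a minimal sketch, assuming the same grayscale source and destination Mats:

    // CLAHE: contrast-limited adaptive histogram equalization.
    CLAHE clahe = Imgproc.createCLAHE(2.0, new Size(8, 8)); // clip limit, tile grid
    clahe.apply(source, destination);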
From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java
License:Open Source License
/**
 * Computes the four corner points of the detected quadrilateral by
 * intersecting each pair of line equations.
 * @param lineEq line equations (ax + by = 1), indexed as [angle][section]
 * @param points output ArrayList that receives the corner points
 * @param img debug image to draw the corners on; pass null to skip drawing
 * @return true on success, false if a line pair could not be intersected
 */
private boolean calcSquare(StraightLineEquation lineEq[][], ArrayList<Point> points, Mat img) {
    // 2x2 coefficient matrix for each pair of lines
    Mat mat = new Mat(2, 2, CvType.CV_32F);
    mPointCenterX = 0.0f;
    mPointCenterY = 0.0f;
    int counter = 0;
    for (int ang0sec = 0; ang0sec < 2; ang0sec++) {
        mat.put(0, 0, lineEq[0][ang0sec].a);
        mat.put(0, 1, lineEq[0][ang0sec].b);
        for (int ang1sec = 0; ang1sec < 2; ang1sec++) {
            mat.put(1, 0, lineEq[1][ang1sec].a);
            mat.put(1, 1, lineEq[1][ang1sec].b);
            Mat matAns;
            try {
                matAns = mat.inv();
                if (matAns == null)
                    return false;
            } catch (Exception e) { // the matrix was singular (parallel lines)
                e.printStackTrace();
                return false;
            }
            // inv(A) * [1, 1]^T gives the intersection, offset by the image center.
            float x = (float) (matAns.get(0, 0)[0] + matAns.get(0, 1)[0] + mCenterX);
            float y = (float) (matAns.get(1, 0)[0] + matAns.get(1, 1)[0] + mCenterY);
            Point p = new Point(x, y);
            points.add(p);
            mPointCenterX += x;
            mPointCenterY += y;
            counter++;
        }
    }
    mPointCenterX /= (float) counter;
    mPointCenterY /= (float) counter;
    // Sort the corners into a consistent order.
    Collections.sort(points, new PointComparator());
    if (img != null) {
        Scalar color[] = new Scalar[4];
        color[0] = new Scalar(0xff, 0x00, 0x00);
        color[1] = new Scalar(0x00, 0xff, 0x00);
        color[2] = new Scalar(0x00, 0x00, 0xff);
        color[3] = new Scalar(0xff, 0x00, 0xff);
        for (int i = 0; i < 4; i++) {
            Core.circle(img, points.get(i), 30, color[i], 5);
        }
    }
    if (MyDebug.DEBUG) {
        for (int i = 0; i < 4; i++) {
            Log.d(LOG_TAG, "point(" + i + ") = " + points.get(i).x + ":" + points.get(i).y);
        }
    }
    return true;
}
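The 2x2 inversion above solves each pair of line equations a x + b y = 1 for their intersection. An equivalent sketch using Core.solve, which reports singular (parallel) pairs through its return value; a1, b1, a2, b2 stand in for the lineEq coefficients:

    Mat A = new Mat(2, 2, CvType.CV_32F);
    A.put(0, 0, a1); A.put(0, 1, b1);
    A.put(1, 0, a2); A.put(1, 1, b2);
    Mat b = new Mat(2, 1, CvType.CV_32F, new Scalar(1)); // right-hand side [1, 1]^T
    Mat xy = new Mat();
    boolean ok = Core.solve(A, b, xy, Core.DECOMP_LU);    // false if A is singular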
From source file:com.oetermann.imageclassifier.Util.java
License:Open Source License
public static Mat loadMat(String path) {
    try {
        int rows, cols, type;
        Object data;
        try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path))) {
            rows = (int) ois.readObject();
            cols = (int) ois.readObject();
            type = (int) ois.readObject();
            data = ois.readObject();
        }
        Mat mat = new Mat(rows, cols, type);
        switch (type) {
        case CvType.CV_8S:
        case CvType.CV_8U:
            mat.put(0, 0, (byte[]) data);
            break;
        case CvType.CV_16S:
        case CvType.CV_16U:
            mat.put(0, 0, (short[]) data);
            break;
        case CvType.CV_32S:
            mat.put(0, 0, (int[]) data);
            break;
        case CvType.CV_32F:
            mat.put(0, 0, (float[]) data);
            break;
        case CvType.CV_64F:
            mat.put(0, 0, (double[]) data);
            break;
        }
        return mat;
    } catch (IOException | ClassNotFoundException | ClassCastException ex) {
        System.err.println("ERROR: Could not load mat from file: " + path);
        // Logger.getLogger(ImageClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
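The matching writer is not part of this listing; a minimal counterpart sketch for the 8-bit case, assuming the same field order (rows, cols, type, data):

    public static void saveMat(String path, Mat mat) {
        // Sketch for CV_8U/CV_8S mats; other depths need the matching
        // short[]/int[]/float[]/double[] buffer instead of byte[].
        byte[] data = new byte[(int) (mat.total() * mat.elemSize())];
        mat.get(0, 0, data);
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
            oos.writeObject(mat.rows());
            oos.writeObject(mat.cols());
            oos.writeObject(mat.type());
            oos.writeObject(data);
        } catch (IOException ex) {
            System.err.println("ERROR: Could not save mat to file: " + path);
        }
    }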
From source file:com.opencv.mouse.MouseMainFrame.java
private void jToggleButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jToggleButton1ActionPerformed
    try {
        robot = new Robot();
    } catch (AWTException e) {
    }
    t = new Thread() {
        public void run() {
            // Utility class to convert Mat to Java's BufferedImage
            MatToBufImg matToBufferedImageConverter = new MatToBufImg();
            webCam = new VideoCapture(0);
            if (!webCam.isOpened()) {
                System.out.println("Camera is not open!");
            } else
                System.out.println("Camera opened --> " + webCam.toString());
            Mat webcam_image = new Mat(480, 640, CvType.CV_8UC3);
            // Mat takes rows (height) first, then cols (width).
            Mat hsv_image = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3);
            thresholded = new Mat(webcam_image.rows(), webcam_image.cols(), CvType.CV_8UC3,
                    new Scalar(255, 255, 255));
            if (webCam.isOpened()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ex) {
                }
                while (true) {
                    try {
                        webCam.read(webcam_image);
                    } catch (Exception e) {
                        System.out.println("Webcam is closed!");
                    }
                    if (!webcam_image.empty()) {
                        try {
                            Thread.sleep(10);
                        } catch (InterruptedException ex) {
                        }
                        // Mat inRangeResim = webcam_image.clone();
                        // matToBufferedImageConverter.setMatrix(inRangeResim, ".jpg");
                        // image = matToBufferedImageConverter.getBufferedImage();
                        // Highgui.imwrite("D:\\bitirme.jpg", inRangeResim);
                        // MatOfRect faceDetections = new MatOfRect();
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        // black HSV range: 0 0 0 - 180 45 100
                        // HSV blue
                        Core.inRange(webcam_image, new Scalar(75, 63, 40), new Scalar(118, 255, 255),
                                webcam_image);
                        // RGB blue
                        // Core.inRange(webcam_image, new Scalar(50, 0, 0), new Scalar(255, 0, 0), webcam_image);
                        // orange HSV
                        Core.inRange(webcam_image, new Scalar(5, 50, 50), new Scalar(15, 255, 255),
                                webcam_image);
                        // Core.inRange(webcam_image, new Scalar(80, 50, 50), new Scalar(140, 255, 255), webcam_image);
                        // Core.inRange(webcam_image, new Scalar(29, 0, 24), new Scalar(30, 155, 155), webcam_image);
                        // HSV blue slider presets:
                        // jSliderHmin.setValue(75); jSliderSmin.setValue(63); jSliderVmin.setValue(40);
                        // jSliderHmax.setValue(118); jSliderSmax.setValue(255); jSliderVmax.setValue(255);
                        // jSliderHmin.setValue(0); jSliderSmin.setValue(0); jSliderVmin.setValue(0);
                        // jSliderHmax.setValue(179); jSliderSmax.setValue(39); jSliderVmax.setValue(120);
                        Core.inRange(hsv_image, new Scalar(100, 97, 206), new Scalar(120, 255, 255),
                                thresholded);
                        // Morphological cleanup of the mask.
                        Imgproc.dilate(thresholded, thresholded, element);
                        Imgproc.erode(thresholded, thresholded, element);
                        Imgproc.dilate(thresholded, thresholded, element);
                        Imgproc.erode(thresholded, thresholded, element);
                        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
                        Imgproc.findContours(thresholded, contours, new Mat(), Imgproc.RETR_LIST,
                                Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));
                        Imgproc.drawContours(thresholded, contours, -1, new Scalar(255.0, 255.0, 255.0), 5);
                        for (int i = 0; i < contours.size(); i++) {
                            // System.out.println(Imgproc.contourArea(contours.get(i)));
                            // if (Imgproc.contourArea(contours.get(i)) > 1) {
                            Rect rect = Imgproc.boundingRect(contours.get(i));
                            kesit = thresholded.submat(rect);
                            // System.out.println(rect.height);
                            // if (rect.height > 20 && rect.height < 30 && rect.width < 30 && rect.width > 20) {
                            //     System.out.println(rect.x + "," + rect.y + "," + rect.height + "," + rect.width);
                            Core.rectangle(webcam_image, new Point(rect.x, rect.y),
                                    new Point(rect.x + rect.width, rect.y + rect.height),
                                    new Scalar(0, 0, 255));
                            // }
                            // }
                            if (rect.height > 15 && rect.width > 15) {
                                System.out.println(rect.x + "\n" + rect.y);
                                Core.circle(webcam_image, new Point(rect.x, rect.y), i, new Scalar(0, 255, 0));
                                // Map the blob position to screen coordinates and move the cursor.
                                robot.mouseMove((int) (rect.x * 3), (int) (rect.y * 2.25));
                            }
                        }
                        // Imgproc.cvtColor(webcam_image, webcam_image, Imgproc.COLOR_HSV2BGR);
                        // hsv_image.convertTo(hsv_image, CvType.CV_32F);
                        // Imgproc.Canny(thresholded, thresholded, 10, 20);
                        // Core.bitwise_and(thresholded, webcam_image, webcam_image); // this works
                        // Imgproc.cvtColor(thresholded, thresholded, Imgproc.COLOR_GRAY2BGR);
                        // Core.bitwise_and(thresholded, webcam_image, webcam_image);
                        // webcam_image.copyTo(hsv_image, thresholded);
                        // System.out.println("<------------------------------>");
                        // System.out.println("BGR: " + webcam_image.channels() + " Size : " + webcam_image.size());
                        // System.out.println("HSV :" + hsv_image.channels() + " Size: " + hsv_image.size());
                        // System.out.println("Thresold :" + thresholded.channels() + " Size : " + thresholded.size());
                        // System.out.println("<------------------------------>");
                        matToBufferedImageConverter.setMatrix(webcam_image, ".jpg");
                        image = matToBufferedImageConverter.getBufferedImage();
                        g.drawImage(image, 0, 0, webcam_image.cols(), webcam_image.rows(), null);
                    } else {
                        System.out.println("No image!");
                        break;
                    }
                }
                // webCam.release();
            }
        }
    };
    threadDurum = true;
    t.start();
}
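The element kernel used by the dilate/erode calls above is a field defined elsewhere in the class; a plausible definition, with the kernel shape and size being assumptions:

    // Hypothetical kernel; the original listing does not show its real definition.
    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));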
From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java
License:Open Source License
public static Mat resize(Mat mat, int bytesSizeTarget) {
    long matBytesCount = matBytesCount(mat);
    float ratio = (float) matBytesCount / bytesSizeTarget;
    double sqrtRatio = Math.sqrt(ratio);
    int newWidth = (int) (mat.size().width / sqrtRatio);
    int newHeight = (int) (mat.size().height / sqrtRatio);
    Mat newMat = new Mat(newHeight, newWidth, mat.type());
    // When a non-zero dsize is given, OpenCV ignores the fx/fy scale factors,
    // so newMat.size() alone determines the output dimensions here.
    Imgproc.resize(mat, newMat, newMat.size(), sqrtRatio, sqrtRatio, Imgproc.INTER_LANCZOS4);
    return newMat;
}
From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java
License:Open Source License
private static Mat bytesToMat(byte[] bytes, int rows, int columns, boolean raw) {
    int simDocLineExtra = raw ? 1 : 0;
    Mat mat = new Mat(rows, columns, CvType.CV_8UC1);
    byte[] dat = new byte[1];
    for (int y = 0; y < rows; y++) {
        for (int x = 0; x < columns; x++) {
            dat[0] = bytes[y * (columns + simDocLineExtra) + x];
            mat.put(y, x, dat);
        }
    }
    return mat;
}
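The per-pixel put above makes one JNI call per byte. A row-wise variant of the same copy, as a sketch, cuts that to one call per row:

    Mat mat = new Mat(rows, columns, CvType.CV_8UC1);
    int stride = columns + (raw ? 1 : 0); // raw rows carry one extra byte to skip
    byte[] row = new byte[columns];
    for (int y = 0; y < rows; y++) {
        System.arraycopy(bytes, y * stride, row, 0, columns);
        mat.put(y, 0, row);
    }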
From source file:com.projectcs2103t.openglestest.OpenGLES20Activity.java
License:Apache License
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    float projection[] = mCameraProjectionAdapter.getProjectionGL();
    Mat CameraMat = mCameraProjectionAdapter.getCVCameraMat();
    Mat DistortionMat = mCameraProjectionAdapter.getCVDistortionMat();
    Mat ModelViewMat = new Mat(4, 4, CvType.CV_64FC1);
    int detected = nl.processFrame(rgba.getNativeObjAddr(), CameraMat.getNativeObjAddr(),
            DistortionMat.getNativeObjAddr(), ModelViewMat.getNativeObjAddr());
    float mGLModelView[] = null;
    if (detected == 1) {
        mGLModelView = new float[16];
        mGLModelView[0] = (float) ModelViewMat.get(0, 0)[0];
        mGLModelView[1] = (float) ModelViewMat.get(0, 1)[0];
        mGLModelView[2] = (float) ModelViewMat.get(0, 2)[0];
        mGLModelView[3] = (float) ModelViewMat.get(0, 3)[0];
        mGLModelView[4] = (float) ModelViewMat.get(1, 0)[0];
        mGLModelView[5] = (float) ModelViewMat.get(1, 1)[0];
        mGLModelView[6] = (float) ModelViewMat.get(1, 2)[0];
        mGLModelView[7] = (float) ModelViewMat.get(1, 3)[0];
        mGLModelView[8] = (float) ModelViewMat.get(2, 0)[0];
        mGLModelView[9] = (float) ModelViewMat.get(2, 1)[0];
        mGLModelView[10] = (float) ModelViewMat.get(2, 2)[0];
        mGLModelView[11] = (float) ModelViewMat.get(2, 3)[0];
        mGLModelView[12] = (float) ModelViewMat.get(3, 0)[0];
        mGLModelView[13] = (float) ModelViewMat.get(3, 1)[0];
        mGLModelView[14] = (float) ModelViewMat.get(3, 2)[0];
        mGLModelView[15] = (float) ModelViewMat.get(3, 3)[0];
        // showMatrices(rgba, ModelViewMat);
    }
    mCameraProjectionAdapter.setModelViewGL(mGLModelView);
    Imgproc.putText(rgba, mCameraProjectionAdapter.toString(), new Point(50, 50), Core.FONT_HERSHEY_PLAIN,
            1.0, new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mGLView.toString(), new Point(50, 75), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    return rgba;
}
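The sixteen element-wise get calls can be collapsed into one bulk read, since Mat.get with a double[] reads row-major from a continuous CV_64FC1 matrix; a sketch:

    double[] buf = new double[16];
    ModelViewMat.get(0, 0, buf); // reads the whole 4x4 in row-major order
    float[] mGLModelView = new float[16];
    for (int i = 0; i < 16; i++) {
        mGLModelView[i] = (float) buf[i];
    }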
From source file:com.projecttango.examples.java.pointcloud.MainActivity.java
License:Open Source License
/**
 * Set up the callback listeners for the Tango Service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the Point Cloud and Tango Events and Pose.
 */
private void startupTango() {
    final ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();
    framePairs.add(new TangoCoordinateFramePair(TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
            TangoPoseData.COORDINATE_FRAME_DEVICE));
    mTango.connectListener(framePairs, new Tango.TangoUpdateCallback() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // Passing the pose data to the UX library produces exceptions.
            if (mTangoUx != null) {
                mTangoUx.updatePoseStatus(pose.statusCode);
            }
            mapPos = pose; /* TANGO POSE UPDATE FOR MAP HERE */
            // mapInfo.setCurrentCell(pose);
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            if (mTangoUx != null) {
                mTangoUx.updatePointCloud(pointCloud);
            }
            mPointCloudManager.updatePointCloud(pointCloud);
            final double currentTimeStamp = pointCloud.timestamp;
            final double pointCloudFrameDelta = (currentTimeStamp - mPointCloudPreviousTimeStamp)
                    * SECS_TO_MILLISECS;
            mPointCloudPreviousTimeStamp = currentTimeStamp;
            final double averageDepth = getAveragedDepth(pointCloud.points, pointCloud.numPoints);
            mPointCloudTimeToNextUpdate -= pointCloudFrameDelta;
            if (mPointCloudTimeToNextUpdate < 0.0) {
                mPointCloudTimeToNextUpdate = UPDATE_INTERVAL_MS;
                final String pointCountString = Integer.toString(pointCloud.numPoints);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        // mPointCountTextView.setText(pointCountString);
                        // mAverageZTextView.setText(FORMAT_THREE_DECIMAL.format(averageDepth));
                    }
                });
            }
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // Only the color camera feed is processed here.
            if (cameraId == TangoCameraIntrinsics.TANGO_CAMERA_COLOR) {
                tangoCameraPreview.onFrameAvailable();
                bm[0] = tangoCameraPreview.getBitmap();
                frameCount++;
                Log.d("FPSTango", ": " + frameCount);
                Bitmap openCVBitmap = tangoCameraPreview.getBitmap();
                // Mat takes rows (height) first, then cols (width); the bitmap
                // pixels must be copied into the Mat before running the detector.
                tmp = new Mat(openCVBitmap.getHeight(), openCVBitmap.getWidth(), CvType.CV_8UC4);
                Utils.bitmapToMat(openCVBitmap, tmp);
                mDetector.process(tmp);
                List<MatOfPoint> contours = mDetector.getContours();
                // Log.e("rescue robotics", "Contours count: " + contours.size());
                Imgproc.drawContours(tmp, contours, -1, CONTOUR_COLOR);
                Mat colorLabel = tmp.submat(4, 68, 4, 68);
                colorLabel.setTo(mBlobColorRgba);
                Mat spectrumLabel = tmp.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
                mSpectrum.copyTo(spectrumLabel);
                if (mDetector.blobsDetected() > 0) {
                    toast("I see a Blob!");
                }
                if (frameCount == 30) {
                    frameCount = 0;
                    scan(tangoCameraPreview.getBitmap());
                }
            }
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            if (mTangoUx != null) {
                mTangoUx.updateTangoEvent(event);
            }
        }
    });
}
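The getAveragedDepth helper referenced above is not shown in this listing; a sketch consistent with Tango's point-cloud layout of four floats per point (x, y, z, confidence), averaging every fourth value starting at the z offset:

    private float getAveragedDepth(FloatBuffer pointCloudBuffer, int numPoints) {
        float totalZ = 0;
        if (numPoints != 0) {
            int numFloats = 4 * numPoints; // x, y, z, confidence per point
            for (int i = 2; i < numFloats; i = i + 4) {
                totalZ += pointCloudBuffer.get(i);
            }
            totalZ = totalZ / numPoints;
        }
        return totalZ;
    }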
From source file:com.raulh82vlc.face_detection_sample.opencv.domain.EyesDetectionInteractorImpl.java
License:Apache License
/**
 * Matches a concrete point of the eye by using template matching with TM_SQDIFF_NORMED.
 */
private static void matchEye(Rect area, Mat builtTemplate, Mat matrixGray, Mat matrixRGBA) {
    Point matchLoc;
    try {
        // Skip matching while no template has been built yet.
        if (builtTemplate.cols() == 0 || builtTemplate.rows() == 0) {
            return;
        }
        Mat submatGray = matrixGray.submat(area);
        int cols = submatGray.cols() - builtTemplate.cols() + 1;
        int rows = submatGray.rows() - builtTemplate.rows() + 1;
        // matchTemplate writes a CV_32FC1 result of size rows x cols.
        Mat outputTemplateMat = new Mat(rows, cols, CvType.CV_32FC1);
        Imgproc.matchTemplate(submatGray, builtTemplate, outputTemplateMat, Imgproc.TM_SQDIFF_NORMED);
        Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(outputTemplateMat);
        // TM_SQDIFF_NORMED is a difference measure, so the best match is the minimum.
        matchLoc = minMaxLocResult.minLoc;
        // Translate the match location back into full-image coordinates.
        Point matchLocTx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
        Point matchLocTy = new Point(matchLoc.x + builtTemplate.cols() + area.x,
                matchLoc.y + builtTemplate.rows() + area.y);
        FaceDrawerOpenCV.drawMatchedEye(matchLocTx, matchLocTy, matrixRGBA);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
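Which corner of the minMaxLoc result to take depends on the matching method; a short sketch, with method as a placeholder variable:

    // TM_SQDIFF / TM_SQDIFF_NORMED measure difference: the best match is the minimum.
    // The CCORR and CCOEFF families measure similarity: the best match is the maximum.
    Core.MinMaxLocResult r = Core.minMaxLoc(outputTemplateMat);
    Point best = (method == Imgproc.TM_SQDIFF || method == Imgproc.TM_SQDIFF_NORMED)
            ? r.minLoc
            : r.maxLoc;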