List of usage examples for org.opencv.core Mat get
public int get(int row, int col, double[] data)

Mat.get is also overloaded for byte[], short[], int[], and float[] buffers; each buffer overload requires the Mat depth to match the buffer type, and the examples below use whichever variant matches their Mat. The buffer-less form get(int row, int col) returns the element at (row, col) as a double[] for any Mat type.
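Before the project excerpts, here is a minimal self-contained sketch of the two common forms of Mat.get (this snippet is illustrative only and not taken from any of the source files below; the class name MatGetDemo is made up):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatGetDemo {
    public static void main(String[] args) {
        // load the native library first, as the examples below do
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // a 3x3 double-precision Mat initialized to the identity matrix
        Mat m = Mat.eye(3, 3, CvType.CV_64FC1);

        // form 1: get(row, col) returns one double per channel
        double[] center = m.get(1, 1);
        System.out.println("m(1,1) = " + center[0]); // prints 1.0

        // form 2: get(row, col, buffer) copies elements row by row starting
        // at (row, col); the Mat depth must match the buffer type (CV_64F here)
        double[] firstRow = new double[3];
        m.get(0, 0, firstRow);

        // bulk copy of the whole Mat, the pattern most examples on this page use:
        // a buffer of rows * cols * channels elements read from (0, 0)
        double[] all = new double[(int) m.total() * m.channels()];
        m.get(0, 0, all);
        System.out.println("copied " + all.length + " elements");
    }
}

The same bulk pattern with byte[], short[], or float[] buffers is what the ImageJ plugins below use to move pixel data between OpenCV Mats and ImageJ pixel arrays.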
From source file:OCV_HoughLines.java
License:Open Source License
private void showData(Mat lines, int imw, int imh) {
    // prepare the ResultsTable
    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    // prepare the ROI Manager
    RoiManager roiMan = null;

    if (enAddRoi) {
        roiMan = OCV__LoadLibrary.GetRoiManager(true, true);
    }

    // show
    int num_lines = lines.rows();
    float[] res = new float[2];

    for (int i = 0; i < num_lines; i++) {
        lines.get(i, 0, res);

        float rho = res[0];
        float theta = res[1];
        double a = Math.cos(theta);
        double b = Math.sin(theta);
        double x0 = a * rho;
        double y0 = b * rho;
        double z = imw < imh ? imh : imw;
        double x1 = x0 + z * (-b);
        double y1 = y0 + z * a;
        double x2 = x0 - z * (-b);
        double y2 = y0 - z * a;

        rt.incrementCounter();
        rt.addValue("rho", rho);
        rt.addValue("theta", theta);
        rt.addValue("z", z);
        rt.addValue("x1", x1);
        rt.addValue("y1", y1);
        rt.addValue("x2", x2);
        rt.addValue("y2", y2);

        if (enAddRoi && (roiMan != null)) {
            Line roi = new Line(x1, y1, x2, y2);
            roiMan.addRoi(roi);
        }
    }

    rt.show("Results");
}
From source file:OCV_AdaptiveThreshold.java
License:Open Source License
@Override
public void run(ImageProcessor ip) {
    int imw = ip.getWidth();
    int imh = ip.getHeight();

    // srcdst
    byte[] srcdst_ar = (byte[]) ip.getPixels();

    // mat
    Mat src_mat = new Mat(imh, imw, CvType.CV_8UC1);
    Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC1);

    // run
    src_mat.put(0, 0, srcdst_ar);
    Imgproc.adaptiveThreshold(src_mat, dst_mat, maxValue, INT_ADAPTIVEMETHOD[indMethod], INT_THRESHOLDTYPE[indType], blockSize, subC);
    dst_mat.get(0, 0, srcdst_ar);
}
From source file:MainShapeConversion.java
public static void main(String[] args) {
    try {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        File input = new File("D://teste.png");
        BufferedImage image = ImageIO.read(input);
        byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        Mat mat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);
        mat.put(0, 0, data);

        // Mat takes (rows, cols, type), i.e. (height, width, type)
        Mat mat1 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);
        // flipCode < 0 flips around both axes; 0 flips vertically; > 0 flips horizontally
        Core.flip(mat, mat1, -1);

        byte[] data1 = new byte[mat1.rows() * mat1.cols() * (int) (mat1.elemSize())];
        mat1.get(0, 0, data1);

        BufferedImage image1 = new BufferedImage(mat1.cols(), mat1.rows(), BufferedImage.TYPE_3BYTE_BGR);
        image1.getRaster().setDataElements(0, 0, mat1.cols(), mat1.rows(), data1);

        File output = new File("D://hsv.jpg");
        ImageIO.write(image1, "jpg", output);
    } catch (Exception e) {
        System.out.println("Exception: " + e.getMessage());
    }
}
From source file:Video.java
public BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;

    matrix.get(0, 0, data);

    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;

        // bgr to rgb
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }

    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);
    return image;
}
From source file:Fiji_OpenCV.java
License:Creative Commons License
public void process(int[] pixels) {
    int channels = 3;
    byte[] buf = new byte[pixels.length * channels];

    // unpack ImageJ's packed ARGB ints into a BGR byte buffer
    for (int i = 0; i < pixels.length; i++) {
        buf[i * channels] = (byte) (0x000000ff & (pixels[i]));
        buf[i * channels + 1] = (byte) (0x000000ff & (pixels[i] >>> 8));
        buf[i * channels + 2] = (byte) (0x000000ff & (pixels[i] >>> 16));
    }

    // Mat takes (rows, cols, type), i.e. (height, width, type)
    Mat image = new Mat(height, width, CvType.CV_8UC3);
    image.put(0, 0, buf);

    // Create a face detector from the cascade file in the resources directory.
    CascadeClassifier faceDetector = new CascadeClassifier(
            getClass().getResource("/opencv/data/haarcascades/haarcascade_frontalface_alt2.xml").getPath());

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    // copy the (possibly annotated) Mat back into the pixel array,
    // masking with 0xff to avoid sign extension of the signed bytes
    image.get(0, 0, buf);

    for (int i = 0; i < pixels.length; i++) {
        pixels[i] = 0x80000000
                + ((buf[i * channels + 2] & 0xff) << 16)
                + ((buf[i * channels + 1] & 0xff) << 8)
                + (buf[i * channels] & 0xff);
    }

    this.ip = new ColorProcessor(width, height, pixels);
}
From source file:TelaTakeFoto.java
/**
 * Creates a BufferedImage from a Mat.
 * @param mat the source Mat (8-bit, 3-channel BGR)
 * @return the resulting BufferedImage
 */
public static BufferedImage createBufferedImage(Mat mat) {
    BufferedImage image = new BufferedImage(mat.width(), mat.height(), BufferedImage.TYPE_3BYTE_BGR);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    byte[] data = dataBuffer.getData();
    mat.get(0, 0, data);
    return image;
}
From source file:OCV__LoadLibrary.java
License:Open Source License
/**
 * Converts an OpenCV CV_8UC3 Mat to ImageJ color data (int[]).
 * @param src_cv_8uc3 a CV_8UC3 Mat of OpenCV
 * @param dst_ar color data of ImageJ (int[])
 * @param imw width of the image
 * @param imh height of the image
 */
public static void mat2intarray(Mat src_cv_8uc3, int[] dst_ar, int imw, int imh) {
    if ((src_cv_8uc3.width() != imw) || (src_cv_8uc3.height() != imh) || dst_ar.length != imw * imh) {
        IJ.error("Wrong image size");
    }

    for (int y = 0; y < imh; y++) {
        for (int x = 0; x < imw; x++) {
            byte[] dst_cv_8uc3_ele = new byte[3];
            src_cv_8uc3.get(y, x, dst_cv_8uc3_ele);

            // mask with 0xff to avoid sign extension of the signed bytes
            int b = dst_cv_8uc3_ele[0] & 0xff;
            int g = (dst_cv_8uc3_ele[1] & 0xff) << 8;
            int r = (dst_cv_8uc3_ele[2] & 0xff) << 16;
            int a = 0xff000000;
            dst_ar[x + imw * y] = b + g + r + a;
        }
    }
}
From source file:OCV_WarpAffine.java
License:Open Source License
@Override
public void run(ImageProcessor ip) {
    int imw = ip.getWidth();
    int imh = ip.getHeight();
    Size size = new Size((double) imw, (double) imh);

    Mat mat = new Mat(2, 3, CvType.CV_64FC1);

    for (int i = 0; i < 2; i++) {
        mat.put(i, 0, new double[] { Double.valueOf(rt.getStringValue(0, i).replaceAll("\"|'", "")) });
        mat.put(i, 1, new double[] { Double.valueOf(rt.getStringValue(1, i).replaceAll("\"|'", "")) });
        mat.put(i, 2, new double[] { Double.valueOf(rt.getStringValue(2, i).replaceAll("\"|'", "")) });
    }

    if (ip.getBitDepth() == 8) {
        byte[] srcdst_ar = (byte[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpAffine(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else if (ip.getBitDepth() == 16) {
        short[] srcdst_ar = (short[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_16UC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_16UC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpAffine(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else if (ip.getBitDepth() == 24) {
        int[] srcdst_ar = (int[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC3);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC3);

        OCV__LoadLibrary.intarray2mat(srcdst_ar, src_mat, imw, imh);
        Imgproc.warpAffine(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        OCV__LoadLibrary.mat2intarray(dst_mat, srcdst_ar, imw, imh);
    } else if (ip.getBitDepth() == 32) {
        float[] srcdst_ar = (float[]) ip.getPixels();
        Mat src_mat = new Mat(imh, imw, CvType.CV_32FC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_32FC1);

        src_mat.put(0, 0, srcdst_ar);
        Imgproc.warpAffine(src_mat, dst_mat, mat, size, FLAGS_INT[flags_ind]);
        dst_mat.get(0, 0, srcdst_ar);
    } else {
        IJ.error("Wrong image format");
    }
}
From source file:OCV_Blur.java
License:Open Source License
@Override
public void run(ImageProcessor ip) {
    if (ip.getBitDepth() == 8) {
        // srcdst
        int imw = ip.getWidth();
        int imh = ip.getHeight();
        byte[] srcdst_bytes = (byte[]) ip.getPixels();

        // mat
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC1);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC1);

        // run
        src_mat.put(0, 0, srcdst_bytes);
        Imgproc.blur(src_mat, dst_mat, ksize, new Point(-1, -1), INT_BORDERTYPE[indBorderType]);
        dst_mat.get(0, 0, srcdst_bytes);
    } else if (ip.getBitDepth() == 16) {
        // srcdst
        int imw = ip.getWidth();
        int imh = ip.getHeight();
        short[] srcdst_shorts = (short[]) ip.getPixels();

        // mat
        Mat src_mat = new Mat(imh, imw, CvType.CV_16S);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_16S);

        // run
        src_mat.put(0, 0, srcdst_shorts);
        Imgproc.blur(src_mat, dst_mat, ksize, new Point(-1, -1), INT_BORDERTYPE[indBorderType]);
        dst_mat.get(0, 0, srcdst_shorts);
    } else if (ip.getBitDepth() == 24) {
        // srcdst
        int imw = ip.getWidth();
        int imh = ip.getHeight();
        int[] srcdst_ints = (int[]) ip.getPixels();

        // mat
        Mat src_mat = new Mat(imh, imw, CvType.CV_8UC3);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_8UC3);

        // run
        OCV__LoadLibrary.intarray2mat(srcdst_ints, src_mat, imw, imh);
        Imgproc.blur(src_mat, dst_mat, ksize, new Point(-1, -1), INT_BORDERTYPE[indBorderType]);
        OCV__LoadLibrary.mat2intarray(dst_mat, srcdst_ints, imw, imh);
    } else if (ip.getBitDepth() == 32) {
        // srcdst
        int imw = ip.getWidth();
        int imh = ip.getHeight();
        float[] srcdst_floats = (float[]) ip.getPixels();

        // mat
        Mat src_mat = new Mat(imh, imw, CvType.CV_32F);
        Mat dst_mat = new Mat(imh, imw, CvType.CV_32F);

        // run
        src_mat.put(0, 0, srcdst_floats);
        Imgproc.blur(src_mat, dst_mat, ksize, new Point(-1, -1), INT_BORDERTYPE[indBorderType]);
        dst_mat.get(0, 0, srcdst_floats);
    } else {
        IJ.error("Wrong image format");
    }
}
From source file:Ko.java
License:Open Source License
/**
 * Converts a cv::Mat to a BufferedImage so that it plays nicely with Java.
 * @param m the Mat to convert
 * @return a BufferedImage version of the Mat
 */
private static BufferedImage toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;

    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }

    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b);

    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}