Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usage for org.opencv.core Mat.rows().

Prototype

public int rows() 
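
rows() returns the number of rows in the matrix; for an image Mat this is the height in pixels. A minimal sketch of a typical call (not taken from the sources below):

public static void printSize(Mat m) {
    // rows() is the matrix height, cols() the width
    System.out.println("rows = " + m.rows() + ", cols = " + m.cols());
}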

Usage

From source file:Retrive.java

public Mat histo(Mat imgs) {
    // split the image into its B, G, R planes
    Vector<Mat> bgr_planes = new Vector<>();
    Core.split(imgs, bgr_planes);

    MatOfInt histSize = new MatOfInt(256);
    final MatOfFloat histRange = new MatOfFloat(0f, 256f);
    boolean accumulate = false;
    Mat b_hist = new Mat();
    int hist_w = 512;
    int hist_h = 600;
    //long bin_w;
    //bin_w = Math.round((double) (hist_w / 256));

    Mat histImage = new Mat(hist_h, hist_w, CvType.CV_8UC3);

    // histogram of the first (blue) channel, scaled to the plot height
    Imgproc.calcHist(bgr_planes, new MatOfInt(0), new Mat(), b_hist, histSize, histRange, accumulate);
    Core.normalize(b_hist, b_hist, 0, histImage.rows(), Core.NORM_MINMAX, -1, new Mat());
    return b_hist;
}
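
The histogram produced by calcHist is a single-column CV_32F Mat with one row per bin, so rows() bounds a read-back loop. A short illustrative sketch of such a caller (printBins is hypothetical, not part of Retrive.java):

private static void printBins(Mat b_hist) {
    // b_hist: histSize rows x 1 column, one bin count per row
    for (int i = 0; i < b_hist.rows(); i++) {
        System.out.println("bin " + i + ": " + b_hist.get(i, 0)[0]);
    }
}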

From source file:MainDilation.java

public static void main(String[] args) {

    try {

        //int erosion_size = 5;
        int dilation_size = 5;

        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR);

        // pre-allocate the output with the same size and type as the input
        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
                new Size(2 * dilation_size + 1, 2 * dilation_size + 1));

        Imgproc.dilate(source, destination, element);

        Highgui.imwrite("D://Dilation.jpg", destination);

    } catch (Exception e) {
        System.out.println("Exception: " + e.getMessage());
    }

}

From source file:MainTextWatermark.java

public static void main(String[] args) {

    try {

        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR);

        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Core.putText(source, "Tutorialspoint.com by DAC",
                new Point(source.rows() / 2, (source.cols() / 15 * 11)), // text position in the image
                Core.FONT_HERSHEY_PLAIN, 1.1, new Scalar(150));

        Highgui.imwrite("D://Watermarked.jpg", source);

    } catch (Exception e) {
        System.out.println("Exception:" + e.getMessage());
    }

}

From source file:OCV_GetAffineTransform.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    MatOfPoint2f matPt_src = new MatOfPoint2f();
    MatOfPoint2f matPt_dst = new MatOfPoint2f();
    matPt_src.fromList(lstPt_src);
    matPt_dst.fromList(lstPt_dst);

    Mat mat = Imgproc.getAffineTransform(matPt_src, matPt_dst);

    if (mat == null || mat.rows() <= 0 || mat.cols() <= 0) {
        IJ.showMessage("Output is null or error");
        return;
    }

    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    for (int i = 0; i < 2; i++) {
        rt.incrementCounter();
        rt.addValue("Column01", String.valueOf(mat.get(i, 0)[0]));
        rt.addValue("Column02", String.valueOf(mat.get(i, 1)[0]));
        rt.addValue("Column03", String.valueOf(mat.get(i, 2)[0]));
    }

    rt.show("Results");
}

From source file:OCV_HoughLinesP.java

License:Open Source License

private void showData(Mat lines) {
    // prepare the ResultsTable
    ResultsTable rt = OCV__LoadLibrary.GetResultsTable(true);

    // prepare the ROI Manager
    RoiManager roiMan = null;

    if (enAddRoi) {
        roiMan = OCV__LoadLibrary.GetRoiManager(true, true);
    }

    // show
    int num_lines = lines.rows();
    int[] line = new int[4];

    for (int i = 0; i < num_lines; i++) {
        lines.get(i, 0, line);

        int x1 = line[0];
        int y1 = line[1];
        int x2 = line[2];
        int y2 = line[3];

        rt.incrementCounter();
        rt.addValue("x1", x1);
        rt.addValue("y1", y1);
        rt.addValue("x2", x2);
        rt.addValue("y2", y2);

        if (enAddRoi && (roiMan != null)) {
            Line roi = new Line(x1, y1, x2, y2);
            roiMan.addRoi(roi);
        }
    }

    rt.show("Results");
}
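
showData expects the Mat filled by Imgproc.HoughLinesP, which stores one detected segment per row. A hedged sketch of producing that input (the Canny thresholds and Hough parameters are illustrative only; gray is assumed to be an 8-bit single-channel image):

Mat edges = new Mat();
Imgproc.Canny(gray, edges, 50, 150);

Mat lines = new Mat();   // one row per segment: x1, y1, x2, y2
Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 50, 30, 10);

if (lines.rows() > 0) {
    showData(lines);
}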

From source file:MainPyramids.java

public static void main(String[] args) {

    try {

        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        System.out.println("Verso OPENCV: " + Core.VERSION);

        //pyramids UP

        Mat source = Highgui.imread("D:\\teste.png", Highgui.CV_LOAD_IMAGE_COLOR);

        // pre-allocate the up-sampled output (twice the input size)
        Mat destinationUp = new Mat(source.rows() * 2, source.cols() * 2, source.type());

        Imgproc.pyrUp(source, destinationUp, new Size(source.cols() * 2, source.rows() * 2),
                Imgproc.BORDER_DEFAULT);

        Highgui.imwrite("D://pyrUp.jpg", destinationUp);

        //pyramids DOWN

        source = Highgui.imread("D://teste.png", Highgui.CV_LOAD_IMAGE_COLOR);

        // pre-allocate the down-sampled output (half the input size)
        Mat destinationDown = new Mat(source.rows() / 2, source.cols() / 2, source.type());

        Imgproc.pyrDown(source, destinationDown, new Size(source.cols() / 2, source.rows() / 2));

        Highgui.imwrite("pyrDown.jpg", destinationDown);

    } catch (Exception e) {
        System.out.println("Exception: " + e.getMessage());
    }
}

From source file:airhockeyjava.detection.PS3EyeFrameGrabber.java

License:Open Source License

/**
 * Convert a matrix into an image.
 * 
 * @param m
 *            matrix to be converted
 * @return Converted BufferedImage
 */
private static BufferedImage toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
        // System.out.println("3 Channel BufferedImage");
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}
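
A hedged usage sketch for the helper above: read an image, convert it, and show the result in a Swing window. The file path is illustrative, the helper is assumed to be accessible (it is private in the source), and Imgcodecs.imread belongs to OpenCV 3.x (use Highgui.imread on 2.4):

Mat m = Imgcodecs.imread("input.png");   // 3-channel BGR image
BufferedImage img = toBufferedImage(m);

JFrame window = new JFrame("preview");
window.add(new JLabel(new ImageIcon(img)));
window.pack();
window.setVisible(true);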

From source file:arlocros.ComputePose.java

License:Apache License

public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            p = new Point(vertex2d[1].x, vertex2d[1].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            //            final MatOfPoint mop = new MatOfPoint();
            //            mop.fromList(points2dlist);
            //            final List<MatOfPoint> pts = new ArrayList<>();
            //            pts.add(mop);

            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));

            if (visualization) {
                // draw red rectangle around detected marker
                Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                        new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
                final String markerFile = patternmap.get(id).replaceAll(".*4x4_", "").replace(".patt", "");
                Core.putText(image2, markerFile,
                        new Point((vertex2d[2].x + vertex2d[0].x) / 2.0, vertex2d[0].y - 5), 4, 1,
                        new Scalar(250, 0, 0));
            }
        }

    }
    // load 2D and 3D points to Mats for solvePNP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP when there is no input data (no markers detected)
    if (points2dlist.size() == 0) {
        objectPoints.release();
        imagePoints.release();
        return false;
    }

    // RANSAC-based pose estimation (may introduce more jitter than plain solvePnP)
    Mat inliers = new Mat();

    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ArMarkerPoseEstimator.getLog()
            .info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());

    objectPoints.release();
    imagePoints.release();

    // avoid publishing a zero pose if localization failed
    if (inliers.rows() == 0) {
        inliers.release();
        return false;
    }

    inliers.release();
    return true;
}

From source file:arlocros.Imshow.java

License:Apache License

/**
 * @param opencvImage the OpenCV image (Mat) to display
 */
public static void show(Mat opencvImage) {

    Dimension frameSize = new Dimension(opencvImage.rows(), opencvImage.cols());
    if (frame == null) {
        frame = new Imshow("", frameSize.height, frameSize.width);
        frame.Window.setVisible(true);

        frame.Window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        if (frame.SizeCustom) {
            Imgproc.resize(opencvImage, opencvImage, new Size(frame.Height, frame.Width));
        }
    }
    BufferedImage bufImage = null;
    try {

        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (opencvImage.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        int bufferSize = opencvImage.channels() * opencvImage.cols() * opencvImage.rows();
        byte[] b = new byte[bufferSize];
        opencvImage.get(0, 0, b);
        BufferedImage bufferedImage = new BufferedImage(opencvImage.cols(), opencvImage.rows(), type);
        final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        bufImage = bufferedImage;
        frame.image.setImage(bufImage);
        frame.Window.pack();
        frame.label.updateUI();
        //frame.Window.setVisible(true);
    } catch (RuntimeException e) {
        logger.info("Exception while visualizing.", e);
    }
}

From source file:at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License:Open Source License

public boolean isFrameSame(Mat frame1, Mat frame2) {

    Mat tmp1 = new Mat(frame1.rows(), frame1.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame1, tmp1, Imgproc.COLOR_RGB2GRAY);

    Mat tmp2 = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame2, tmp2, Imgproc.COLOR_RGB2GRAY);

    Mat subtractResult = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    Core.absdiff(tmp1, tmp2, subtractResult);   // diff the grayscale versions
    Imgproc.threshold(subtractResult, subtractResult, 50, 1, Imgproc.THRESH_BINARY);
    Scalar sumDiff = Core.sumElems(subtractResult);
    double diffRatio = sumDiff.val[0] / (frame1.cols() * frame2.rows());

    return diffRatio < .05;

}