Example usage for org.opencv.core Mat dump

List of usage examples for org.opencv.core Mat dump

Introduction

On this page you can find example usages of org.opencv.core.Mat.dump().

Prototype

public String dump() 
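
For orientation, a minimal self-contained sketch of calling dump() (the class name and matrix values below are illustrative, not taken from the examples on this page):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatDumpSketch {
    public static void main(String[] args) {
        // Load the native OpenCV library via the standard constant.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Build a small 2x2 double matrix element by element.
        Mat m = new Mat(2, 2, CvType.CV_64FC1);
        m.put(0, 0, 1.0);
        m.put(0, 1, 2.0);
        m.put(1, 0, 3.0);
        m.put(1, 1, 4.0);

        // dump() returns the matrix contents as a human-readable String,
        // roughly "[1, 2;\n 3, 4]" for this matrix.
        System.out.println(m.dump());
    }
}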

Usage

From source file:org.ar.rubik.CameraCalibration.java

License:Open Source License

/**
 * Get OpenCV Camera Matrix
 * 
 * This matrix represents the intrinsic properties of the camera.
 * Matrix is basically:
 * 
 *    |   Fx    0    Cx  |
 *    |    0   Fy    Cy  |
 *    |    0    0     1  |
 *    
 *    Fx := X Focal Length
 *    Fy := Y Focal Length
 *    Cx := X Optical Center
 *    Cy := Y Optical Center
 * 
 * 
 * @return 3x3 camera intrinsic matrix (CV_64FC1)
 */
public Mat getOpenCVCameraMatrix(int width, int height) {

    Log.v(Constants.TAG_CAL, "CameraCalibration.getOpenCVCameraMatrix(): width=" + width + " height=" + height);

    double focalLengthXPixels = width / (2.0 * Math.tan(0.5 * fovX));
    double focalLengthYPixels = height / (2.0 * Math.tan(0.5 * fovY));

    Mat cameraMatrix = new Mat(3, 3, CvType.CV_64FC1);
    cameraMatrix.put(0, 0, focalLengthXPixels); // should be X focal length in pixels.
    cameraMatrix.put(0, 1, 0.0);
    cameraMatrix.put(0, 2, width / 2.0);
    cameraMatrix.put(1, 0, 0.0);
    cameraMatrix.put(1, 1, focalLengthYPixels); // should be Y focal length in pixels.
    cameraMatrix.put(1, 2, height / 2.0);
    cameraMatrix.put(2, 0, 0.0);
    cameraMatrix.put(2, 1, 0.0);
    cameraMatrix.put(2, 2, 1.0);

    Log.v(Constants.TAG_CAL, "Android Camera Calibration Matrix: ");
    Log.v(Constants.TAG_CAL, cameraMatrix.dump());

    //       =+= From Android Camera Calibration App at resolution 1920 x 1080
    //       cameraMatrix.put(0, 0, 1686.1);
    //       cameraMatrix.put(0, 1, 0.0);
    //       cameraMatrix.put(0, 2, 959.5);
    //       cameraMatrix.put(1, 0, 0.0);
    //       cameraMatrix.put(1, 1, 1686.1);
    //       cameraMatrix.put(1, 2, 539.5);
    //       cameraMatrix.put(2, 0, 0.0);
    //       cameraMatrix.put(2, 1, 0.0);
    //       cameraMatrix.put(2, 2, 1.0);
    //
    //       Log.v(Constants.TAG_CAL, "Camera Calibration App Matrix: ");
    //       Log.v(Constants.TAG_CAL, cameraMatrix.dump());

    return cameraMatrix;
}
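
As a rough sanity check of the focal-length formula above (numbers are illustrative, not from the source): for a 1920x1080 frame and a horizontal field of view of about 60 degrees, fx = 1920 / (2 * tan(30 degrees)) ≈ 1663 pixels, which is in the same range as the 1686.1 value from the calibration app commented out above. A hedged sketch of exercising the method and inspecting the result with dump() (cameraCalibration is a hypothetical instance of the class above):

    // Illustrative only: intrinsics for a 1920x1080 preview, logged with dump().
    // Expect a 3x3 CV_64FC1 matrix with fx, fy on the diagonal and the optical
    // center at (width/2, height/2).
    Mat cameraMatrix = cameraCalibration.getOpenCVCameraMatrix(1920, 1080);
    Log.v(Constants.TAG_CAL, "Intrinsics: " + cameraMatrix.dump());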

From source file:org.ar.rubik.CubeReconstructor.java

License:Open Source License

/**
 * Pose Estimation
 * 
 * Deduce real world cube coordinates and rotation
 * 
 * @param rubikFace
 * @param image 
 * @param stateModel 
 */
public void poseEstimation(RubikFace rubikFace, Mat image, StateModel stateModel) {

    if (rubikFace == null)
        return;

    if (rubikFace.faceRecognitionStatus != FaceRecognitionStatusEnum.SOLVED)
        return;

    LeastMeansSquare lmsResult = rubikFace.lmsResult;

    if (lmsResult == null)
        return;

    // solvePnP needs at least four point correspondences; this code requires more than four.
    if (rubikFace.rhombusList.size() <= 4)
        return;

    // Lists of corresponding real-world (3D) points and screen-image (2D) points.
    List<Point3> objectPointsList = new ArrayList<Point3>(9);
    List<Point> imagePointsList = new ArrayList<Point>(9);

    // Create list of image (in 2D) and object (in 3D) points.
    // Loop over Rubik Face Tiles/Rhombi
    for (int n = 0; n < 3; n++) {
        for (int m = 0; m < 3; m++) {

            Rhombus rhombus = rubikFace.faceRhombusArray[n][m];

            // Only use if Rhombus was non null.
            if (rhombus != null) {

                // Obtain center of Rhombus in screen image coordinates
                // Convention:
                //  o X is zero on the left, and increases to the right.
                //  o Y is zero on the top and increases downward.
                Point imagePoint = new Point(rhombus.center.x, rhombus.center.y);
                imagePointsList.add(imagePoint);

                // n and m are raw array indices; remap them to mm and nn so that
                // they match the design-doc convention, that is:
                //  o the nn vector points to the right and upwards.
                //  o the mm vector points to the left and upwards.
                int mm = 2 - n;
                int nn = 2 - m;

                // Calculate center of Tile in OpenCV World Space Coordinates
                // Convention:
                //  o X is zero in the center, and increases to the left.
                //  o Y is zero in the center and increases downward.
                //  o Z is zero (at the world-coordinate origin) and increases away from the camera.
                float x = (1 - mm) * 0.66666f;
                float y = -1.0f;
                float z = -1.0f * (1 - nn) * 0.666666f;
                Point3 objectPoint = new Point3(x, y, z);
                objectPointsList.add(objectPoint);
            }
        }
    }

    // Cast image point list into OpenCV Matrix.
    MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(imagePointsList);

    // Cast object point list into OpenCV Matrix.
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(objectPointsList);

    Mat cameraMatrix = stateModel.cameraParameters.getOpenCVCameraMatrix();
    MatOfDouble distCoeffs = new MatOfDouble();
    Mat rvec = new Mat();
    Mat tvec = new Mat();

    //      Log.e(Constants.TAG, "Image Points: " + imagePoints.dump());
    //      Log.e(Constants.TAG, "Object Points: " + objectPoints.dump());

    //      boolean result = 
    Calib3d.solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);

    Log.v(Constants.TAG, String.format("Open CV Rotation Vector x=%4.2f y=%4.2f z=%4.2f", rvec.get(0, 0)[0],
            rvec.get(1, 0)[0], rvec.get(2, 0)[0]));

    // Convert from OpenCV to OpenGL World Coordinates
    x = +1.0f * (float) tvec.get(0, 0)[0];
    y = -1.0f * (float) tvec.get(1, 0)[0];
    z = -1.0f * (float) tvec.get(2, 0)[0];

    // =+= Add manual offset correction to translation  
    x += MenuAndParams.xTranslationOffsetParam.value;
    y += MenuAndParams.yTranslationOffsetParam.value;
    z += MenuAndParams.zTranslationOffsetParam.value;

    // Convert Rotation Vector from the OpenCV axis-polarity convention to the OpenGL convention
    rvec.put(1, 0, -1.0f * rvec.get(1, 0)[0]);
    rvec.put(2, 0, -1.0f * rvec.get(2, 0)[0]);

    // =+= Add manual offset correction to Rotation
    rvec.put(0, 0, rvec.get(0, 0)[0] + MenuAndParams.xRotationOffsetParam.value * Math.PI / 180.0); // X rotation
    rvec.put(1, 0, rvec.get(1, 0)[0] + MenuAndParams.yRotationOffsetParam.value * Math.PI / 180.0); // Y rotation
    rvec.put(2, 0, rvec.get(2, 0)[0] + MenuAndParams.zRotationOffsetParam.value * Math.PI / 180.0); // Z rotation

    // Create an OpenCV Rotation Matrix from the Rotation Vector.
    // (Rodrigues reallocates the output to a 3x3 matrix, so the initial size/type below is overwritten.)
    Mat rMatrix = new Mat(4, 4, CvType.CV_32FC2);
    Calib3d.Rodrigues(rvec, rMatrix);
    Log.v(Constants.TAG, "Rodrigues Matrix: " + rMatrix.dump());

    /*
     * Create an OpenGL Rotation Matrix
     * Notes:
     *   o  OpenGL matrices are stored in column-major order.
     *   o  OpenCV Rodrigues Rotation Matrix is 3x3 where OpenGL Rotation Matrix is 4x4.
     */

    // Initialize all Rotational Matrix elements to zero.
    for (int i = 0; i < 16; i++)
        rotationMatrix[i] = 0.0f; // Initialize to zero

    // Initialize element [3,3] to 1.0: i.e., the "w" component in homogeneous coordinates.
    rotationMatrix[3 * 4 + 3] = 1.0f;

    // Copy OpenCV matrix to OpenGL matrix element by element.
    for (int r = 0; r < 3; r++)
        for (int c = 0; c < 3; c++)
            rotationMatrix[r + c * 4] = (float) (rMatrix.get(r, c)[0]);

    // Diagnostics
    for (int r = 0; r < 4; r++)
        Log.v(Constants.TAG, String.format("Rotation Matrix  r=%d  [%5.2f  %5.2f  %5.2f  %5.2f]", r,
                rotationMatrix[r + 0], rotationMatrix[r + 4], rotationMatrix[r + 8], rotationMatrix[r + 12]));

    //      Log.e(Constants.TAG, "Result: " + result);
    //      Log.e(Constants.TAG, "Camera: " + cameraMatrix.dump());
    //      Log.e(Constants.TAG, "Rotation: " + rvec.dump());
    //      Log.e(Constants.TAG, "Translation: " + tvec.dump());

    //      // Reporting in OpenGL World Coordinates
    //      Core.rectangle(image, new Point(0, 50), new Point(1270, 150), Constants.ColorBlack, -1);
    //      Core.putText(image, String.format("Translation  x=%4.2f y=%4.2f z=%4.2f", x, y, z), new Point(50, 100), Constants.FontFace, 3, Constants.ColorWhite, 3);
    //      Core.putText(image, String.format("Rotation     x=%4.0f y=%4.0f z=%4.0f", cubeXrotation, cubeYrotation, cubeZrotation), new Point(50, 150), Constants.FontFace, 3, Constants.ColorWhite, 3);
}
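
A small diagnostic that fits naturally after the Rodrigues call above (a sketch, not part of the original source; Core.gemm and Mat.t() are standard OpenCV Java calls): a valid rotation matrix satisfies R * R^T = I, and dump() makes that easy to eyeball in the log.

    // Sanity check (illustrative): R * R^T should be close to the 3x3 identity.
    Mat shouldBeIdentity = new Mat();
    Core.gemm(rMatrix, rMatrix.t(), 1.0, new Mat(), 0.0, shouldBeIdentity);
    Log.v(Constants.TAG, "R * R^T (expect identity): " + shouldBeIdentity.dump());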

From source file:qupath.opencv.classify.OpenCvClassifier.java

License:Open Source License

@Override
public int classifyPathObjects(Collection<PathObject> pathObjects) {

    int counter = 0;
    float[] array = new float[measurements.size()];
    Mat samples = new Mat(1, array.length, CvType.CV_32FC1);

    Mat results = new Mat();

    for (PathObject pathObject : pathObjects) {
        MeasurementList measurementList = pathObject.getMeasurementList();
        int idx = 0;
        for (String m : measurements) {
            double value = measurementList.getMeasurementValue(m);

            if (normScale != null && normOffset != null)
                value = (value + normOffset[idx]) * normScale[idx];

            array[idx] = (float) value;
            idx++;
        }

        samples.put(0, 0, array);

        try {
            setPredictedClass(classifier, pathClasses, samples, results, pathObject);
            //            float prediction = classifier.predict(samples);
            //            
            ////            float prediction2 = classifier.predict(samples, results, StatModel.RAW_OUTPUT);
            //            float prediction2 = classifier.predict(samples, results, StatModel.RAW_OUTPUT);
            //            
            //            pathObject.setPathClass(pathClasses.get((int)prediction), prediction2);
        } catch (Exception e) {
            pathObject.setPathClass(null);
            logger.trace("Error with samples: " + samples.dump());
            //               e.printStackTrace();
        }
        //         }
        counter++;
    }

    samples.release();
    results.release();

    return counter;
}

From source file:samples.SimpleSample.java

public static void main(String[] args) {

    System.load("C:\\opencv\\build\\java\\x64\\opencv_java310.dll");
    System.out.println(System.getProperty("java.library.path"));
    System.out.println("Welcome to OpenCV " + Core.VERSION);
    Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));
    System.out.println("OpenCV Mat: " + m);
    Mat mr1 = m.row(1);
    mr1.setTo(new Scalar(1));
    Mat mc5 = m.col(5);
    mc5.setTo(new Scalar(5));
    System.out.println("OpenCV Mat data:\n" + m.dump());
}
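
For reference, the dump() call above prints a 5x10 matrix in which row 1 is filled with 1s and column 5 with 5s (the column is written last, so element (1,5) ends up 5); all other elements stay 0. If the OpenCV Java bindings are on java.library.path, the hard-coded Windows DLL path can also be replaced by the platform-independent loader (a sketch using the standard constant):

    // Resolves to the versioned library name (e.g. "opencv_java310") and
    // looks it up on java.library.path instead of a hard-coded absolute path.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);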