Example usage for org.opencv.core Mat Mat

Introduction

On this page you can find example usage of the org.opencv.core Mat constructor.

Prototype

public Mat(Mat m, Range rowRange, Range colRange) 
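
This overload creates a header for the sub-matrix of m spanned by rowRange and colRange; no pixel data is copied, and both ranges are end-exclusive. None of the examples below happens to call this exact overload, so here is a minimal sketch (class and variable names are ours, not taken from the sources below):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Range;
import org.opencv.core.Scalar;

public class MatRangeExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 8x8 single-channel identity matrix as a source.
        Mat src = Mat.eye(8, 8, CvType.CV_8UC1);

        // View of rows 2..3 and columns 4..5 (end indices are exclusive).
        Mat roi = new Mat(src, new Range(2, 4), new Range(4, 6));
        System.out.println(roi.rows() + "x" + roi.cols()); // prints 2x2

        // The view shares src's buffer: writing to roi also changes src.
        roi.setTo(new Scalar(255));
    }
}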

Usage

From source file:airhockeyjava.detection.PS3EyeFrameGrabber.java

License:Open Source License

/**
 * Grab one frame; the caller has to make a copy of the returned image
 * before processing.
 * 
 * Throws a NullPointerException if the grabber was not started before
 * grabbing.
 * 
 * @return "read-only" RGB, 3-channel image
 */
public Mat grabMat() {
    Mat matImg = new Mat(this.imageHeight, this.imageWidth, CvType.CV_8UC4);
    int[] img = grab_RGB4();
    ByteBuffer byteBuffer = ByteBuffer.allocate(img.length * 4);
    IntBuffer intBuffer = byteBuffer.asIntBuffer();
    intBuffer.put(img);

    byte[] array = byteBuffer.array();
    matImg.put(0, 0, array);
    // Drop the padding channel of each 4-byte RGB4 pixel, keeping the three
    // color channels: split into channels, remove channel 0, merge back.
    List<Mat> mv = new ArrayList<Mat>();
    Core.split(matImg, mv);
    mv.remove(0);
    Core.merge(mv, matImg);
    return matImg;

}
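
As the Javadoc warns, the returned Mat is reused by the grabber, so a caller should copy it before processing. A minimal usage sketch (assuming a started PS3EyeFrameGrabber named grabber):

    Mat frame = grabber.grabMat().clone(); // deep copy, safe to keep and modify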

From source file:angryhexclient.OurVision.java

License:Open Source License

/**
 * Detects the ground in the image.
 * @return A list of blocks representing the ground.
 */
public List<Block> detectGround() {
    Mat binaryImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1));

    // We only detect to the right of this margin. The slingshot has some
    // ground colors and would partly be detected as ground, which is not
    // what we want: trajectories originate at the slingshot, and if ground
    // were detected at the slingshot, the agent would think that none of
    // its trajectories are valid. Therefore we start detecting just to the
    // right of the slingshot.
    int startAtX = findSlingshot().x + findSlingshot().width * 2;

    // Now we create a binary image of the ground areas. White where there
    // is ground, black otherwise.
    for (int y = 0; y < _nHeight; y++) {
        for (int x = 0; x < _nWidth; x++) {
            if (x > startAtX && isGround(x, y))
                binaryImage.put(y, x, 255);
            else
                binaryImage.put(y, x, 0);
        }
    }

    Mat smoothedImage = new Mat(new Size(_nWidth, _nHeight), CvType.CV_8U, new Scalar(1));

    // This median filter improves the detection tremendously. There are a
    // whole lot of single pixels that carry ground colors spread all over
    // the image. We remove them here.
    Imgproc.medianBlur(binaryImage, smoothedImage, 7);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    // We use OpenCV to find the contours. Contours are lines that
    // represent the boundaries of the objects in the binary image.
    Imgproc.findContours(smoothedImage, contours, new Mat(), Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE);

    ArrayList<Block> result = new ArrayList<Block>();

    // Now we convert every contour to blocks for communicating them to DLV.
    for (MatOfPoint mp : contours) {
        org.opencv.core.Point[] pts = mp.toArray();

        for (int i = 0; i < pts.length - 1; i++) {
            Block b = new Block((int) pts[i].x, (int) pts[i].y);
            b.add((int) pts[i + 1].x, (int) pts[i + 1].y);
            result.add(b);
        }

        // One block connecting the last vertex back to the first vertex.
        Block b = new Block((int) pts[pts.length - 1].x, (int) pts[pts.length - 1].y);
        b.add((int) pts[0].x, (int) pts[0].y);
        result.add(b);
    }

    return result;
}
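
Note that Imgproc.findContours is called with RETR_EXTERNAL, so only the outermost contours are returned (holes inside ground regions are ignored), and CHAIN_APPROX_SIMPLE compresses straight contour segments to their endpoints, which keeps the resulting Block list short.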

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);

    logger.info("My instance id is " + parameter.instanceId());
    if (heartbeatMonitor != null) {
        logger.info("Start waiting for arlocros id: " + (parameter.instanceId() - 1));
        while (true) {
            final Time currentTime = connectedNode.getCurrentTime();
            final Time lastHeartbeatTime = heartbeatMonitor.getLastTimeReceivedMessage();
            if (lastHeartbeatTime != null) {
                final Duration duration = currentTime.subtract(lastHeartbeatTime);
                if (duration.totalNsecs() > 3.0E8) {
                    logger.info("Not received any heartbeat for 300ms. Start running.");
                    break;
                }
            }
        }
    }

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    final Mat image = Utils.matFromImage(message);
                    // increase contrast with adaptive black/white thresholding
                    final Mat thresholdedImage = Utils.tresholdContrastBlackWhite(image,
                            parameter.filterBlockSize(), parameter.subtractedConstant(),
                            parameter.invertBlackWhiteColor());
                    image.release();
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    final Mat rvec = new Mat(3, 1, CvType.CV_64F);
                    final MatOfDouble tvec = new MatOfDouble(1.0, 1.0, 1.0);
                    final boolean hasPose = poseProcessor.computePose(rvec, tvec, thresholdedImage);

                    if (!hasPose) {
                        return;
                    }

                    thresholdedImage.release();

                    // publish pose
                    final QuaternionHelper q = new QuaternionHelper();

                    // convert the solvePnP rotation vector result to a rotation matrix
                    Mat R = new Mat(3, 3, CvType.CV_32FC1);
                    Calib3d.Rodrigues(rvec, R);
                    // see publishers before for documentation
                    final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
                    R = R.t();
                    final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
                    final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
                    final double attitudeZ = Math.asin(R.get(1, 0)[0]);
                    q.setFromEuler(bankX, headingY, attitudeZ);
                    Core.multiply(R, new Scalar(-1), R);
                    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
                    R.release();
                    final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                            q.getX(), q.getY(), q.getZ(), q.getW());
                    final double x = tvec_map_cam.get(0, 0)[0];
                    final double y = tvec_map_cam.get(1, 0)[0];
                    final double z = tvec_map_cam.get(2, 0)[0];
                    tvec_map_cam.release();

                    final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x,
                            y, z);
                    final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                            translation, rotation);

                    // odom to camera_rgb_optical_frame
                    final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
                    final GraphName targetFrame = GraphName.of("base_link");
                    org.ros.rosjava_geometry.Transform transform_cam_base = null;

                    if (transformationService.canTransform(targetFrame, sourceFrame)) {
                        try {
                            transform_cam_base = transformationService.lookupTransform(targetFrame,
                                    sourceFrame);
                        } catch (Exception e) {
                            log.error(ExceptionUtils.getStackTrace(e));
                            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                    + "base_link! " + "However, will continue..");
                            // cancel this loop..no result can be computed
                            return;
                        }
                    } else {
                        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                + "base_link!" + " However, " + "will continue..");
                        // cancel this loop..no result can be computed
                        return;
                    }

                    // multiply results
                    org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform
                            .identity();
                    current_pose = current_pose.multiply(transform_map_cam);
                    current_pose = current_pose.multiply(transform_cam_base);

                    if (current_pose.getTranslation().getZ() < 0.5) {
                        return;
                    }

                    // check for plausibility of the pose by checking if movement
                    // exceeds max speed (defined) of the robot
                    if (parameter.badPoseReject()) {
                        Time current_timestamp = connectedNode.getCurrentTime();
                        // TODO Unfortunately, we do not have the tf timestamp at
                        // hand here. So we can only use the current timestamp.
                        double maxspeed = 5;
                        boolean goodpose = false;
                        // if (current_pose != null && current_timestamp != null) {
                        if ((last_pose != null && last_timestamp != null)
                                && !Double.isNaN(last_pose.getTranslation().getX())) {
                            // check speed of movement between last and current pose
                            double distance = PoseCompare.distance(current_pose, last_pose);
                            double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                            if ((distance / timedelta) < maxspeed) {
                                if (smoothing) {
                                    double xold = last_pose.getTranslation().getX();
                                    double yold = last_pose.getTranslation().getY();
                                    double zold = last_pose.getTranslation().getZ();
                                    double xnew = current_pose.getTranslation().getX();
                                    double ynew = current_pose.getTranslation().getY();
                                    double znew = current_pose.getTranslation().getZ();
                                    final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3,
                                            (zold * 2 + znew) / 3);
                                    current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                            current_pose.getRotationAndScale());
                                    last_pose = current_pose;
                                }
                                last_pose = current_pose;
                                last_timestamp = current_timestamp;
                                goodpose = true;
                            } else {
                                log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                                log.info("current pose: " + current_pose.getTranslation().getX() + " "
                                        + current_pose.getTranslation().getY() + " "
                                        + current_pose.getTranslation().getZ());
                                log.info("last pose: " + last_pose.getTranslation().getX() + " "
                                        + last_pose.getTranslation().getY() + " "
                                        + last_pose.getTranslation().getZ());
                            }

                        } else {
                            last_pose = current_pose;
                            last_timestamp = current_timestamp;
                        }
                        // }
                        // bad pose rejection
                        if (!goodpose) {
                            return;
                        }
                    }

                    // set information to message
                    final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
                    Pose pose = posestamped.getPose();
                    Quaternion orientation = pose.getOrientation();
                    Point point = pose.getPosition();

                    point.setX(current_pose.getTranslation().getX());

                    point.setY(current_pose.getTranslation().getY());

                    point.setZ(current_pose.getTranslation().getZ());

                    orientation.setW(current_pose.getRotationAndScale().getW());
                    orientation.setX(current_pose.getRotationAndScale().getX());
                    orientation.setY(current_pose.getRotationAndScale().getY());
                    orientation.setZ(current_pose.getRotationAndScale().getZ());

                    // frame_id too
                    posestamped.getHeader().setFrameId("map");
                    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
                    posePublisher.publish(posestamped);
                    mostRecentPose.set(posestamped);

                    //                publishCamFrameToMarkerFrame(rvec, tvec, tfPublisherCamToMarker, connectedNode);
                    //                publishMapToOdom(
                    //                    rvec, tvec, transformationService, tfPublisherMapToOdom, connectedNode);
                    rvec.release();
                    tvec.release();

                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });
}

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void publishCamFrameToMarkerFrame(Mat rvec, Mat tvec,
        Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker, ConnectedNode connectedNode) {
    QuaternionHelper q = new QuaternionHelper();

    /*
     * http://euclideanspace.com/maths/geometry/rotations/
     * conversions/matrixToEuler/index.htm
     * http://stackoverflow.com/questions/12933284/rodrigues-into-
     * eulerangles-and-vice-versa
     *
     * heading = atan2(-m20,m00) attitude = asin(m10) bank =
     * atan2(-m12,m11)
     */
    // convert output rotation vector rvec to rotation matrix R
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // get rotations around X,Y,Z from rotation matrix R
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    R.release();
    // convert Euler angles to quaternion
    q.setFromEuler(bankX, headingY, attitudeZ);

    // set information to message
    TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
    TransformStamped posestamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = posestamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 point = transform.getTranslation();
    point.setX(tvec.get(0, 0)[0]);
    point.setY(tvec.get(1, 0)[0]);
    point.setZ(tvec.get(2, 0)[0]);

    orientation.setW(q.getW());
    orientation.setX(q.getX());
    orientation.setY(q.getY());
    orientation.setZ(q.getZ());
    posestamped.getHeader().setFrameId(parameter.cameraFrameName());
    posestamped.setChildFrameId(parameter.markerFrameName());
    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
    // frame_id too
    tfmessage.getTransforms().add(posestamped);
    tfPublisherCamToMarker.publish(tfmessage);
}
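
A quick sanity check of the matrix-to-Euler extraction above (a hedged sketch, not part of the original source): a zero rotation vector maps to the identity matrix under Rodrigues, so all three extracted angles must come out as 0.

    Mat rvecZero = Mat.zeros(3, 1, CvType.CV_64F);
    Mat R = new Mat();
    Calib3d.Rodrigues(rvecZero, R); // R becomes the 3x3 identity
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]); // atan2(0, 1) == 0
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]); // atan2(0, 1) == 0
    double attitudeZ = Math.asin(R.get(1, 0)[0]); // asin(0) == 0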

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void publishMapToOdom(Mat rvec, Mat tvec, TransformationService transformationService,
        Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom, ConnectedNode connectedNode) {
    // compute transform map to odom from map to
    // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

    // map to camera_rgb_optical_frame
    Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
    QuaternionHelper q = new QuaternionHelper();
    // get rotation matrix R from the solvePnP output rotation vector rvec
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // transpose R, because we need the transformation from
    // world(map) to camera
    R = R.t();
    // get rotations around X,Y,Z from R in radians
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    q.setFromEuler(bankX, headingY, attitudeZ);
    // compute translation vector from world (map) to cam
    // tvec_map_cam
    Core.multiply(R, new Scalar(-1), R); // R=-R
    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec
    R.release();

    org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(), q.getY(),
            q.getZ(), q.getW());
    double x = tvec_map_cam.get(0, 0)[0];
    double y = tvec_map_cam.get(1, 0)[0];
    double z = tvec_map_cam.get(2, 0)[0];
    tvec_map_cam.release();
    // create a Transform object that holds the map-to-cam transform
    org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
    org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(translation,
            rotation);

    // odom to camera_rgb_optical_frame
    GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
    GraphName targetFrame = GraphName.of("odom");
    org.ros.rosjava_geometry.Transform transform_cam_odom = null;
    if (transformationService.canTransform(targetFrame, sourceFrame)) {
        try {
            transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
        } catch (Exception e) {
            log.error(ExceptionUtils.getStackTrace(e));
            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! " + ""
                    + "" + "" + "" + "However, " + "will continue..");
            return;
        }
    } else {
        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                + "However, will " + "continue..");
        // cancel this loop..no result can be computed
        return;
    }
    // multiply results
    org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
    result = result.multiply(transform_map_cam);
    result = result.multiply(transform_cam_odom);

    // set information to ROS message
    TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
    TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = transformStamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 vector = transform.getTranslation();
    vector.setX(result.getTranslation().getX());
    vector.setY(result.getTranslation().getY());
    vector.setZ(result.getTranslation().getZ());

    orientation.setW(result.getRotationAndScale().getW());
    orientation.setX(result.getRotationAndScale().getX());
    orientation.setY(result.getRotationAndScale().getY());
    orientation.setZ(result.getRotationAndScale().getZ());
    transformStamped.getHeader().setFrameId("map");
    transformStamped.setChildFrameId("odom");
    transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
    // frame_id too
    tfMessage.getTransforms().add(transformStamped);
    tfPublisherMapToOdom.publish(tfMessage);
}

From source file:arlocros.CameraParams.java

License:Apache License

public static MatOfDouble getDistCoeffs(CameraParams cameraParams) {
    final MatOfDouble distCoeffs = new MatOfDouble(new Mat(4, 1, CvType.CV_64FC1));
    distCoeffs.put(0, 0, cameraParams.k1());
    distCoeffs.put(1, 0, cameraParams.k2());
    distCoeffs.put(2, 0, cameraParams.p1());
    distCoeffs.put(3, 0, cameraParams.p2());
    return distCoeffs;
}
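
OpenCV's calibration routines also accept a five-element distortion vector (k1, k2, p1, p2, k3). A hedged variant that carries a third radial term as well (CameraParams above only exposes four coefficients, so k3 is passed in explicitly here):

public static MatOfDouble getDistCoeffsWithK3(CameraParams cameraParams, double k3) {
    final MatOfDouble distCoeffs = new MatOfDouble(new Mat(5, 1, CvType.CV_64FC1));
    distCoeffs.put(0, 0, cameraParams.k1());
    distCoeffs.put(1, 0, cameraParams.k2());
    distCoeffs.put(2, 0, cameraParams.p1());
    distCoeffs.put(3, 0, cameraParams.p2());
    distCoeffs.put(4, 0, k3); // third radial distortion coefficient
    return distCoeffs;
}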

From source file:arlocros.Utils.java

License:Apache License

static public Mat matFromImage(final Image source) throws Exception {
    byte[] imageInBytes = source.getData().array();
    imageInBytes = Arrays.copyOfRange(imageInBytes, source.getData().arrayOffset(), imageInBytes.length);
    Mat cvImage = new Mat(source.getHeight(), source.getWidth(), CvType.CV_8UC3);
    cvImage.put(0, 0, imageInBytes);
    return cvImage;
}

From source file:arlocros.Utils.java

License:Apache License

static public Mat tresholdContrastBlackWhite(Mat srcImage, int filterBlockSize, double subtractedConstant,
        boolean invertBlackWhiteColor) {

    final Mat transformMat = new Mat(1, 3, CvType.CV_32F);
    final int row = 0;
    final int col = 0;
    transformMat.put(row, col, 0.33, 0.33, 0.34);

    final Mat grayImage = new Mat(srcImage.height(), srcImage.width(), CvType.CV_8UC1);
    Core.transform(srcImage, grayImage, transformMat);

    Mat thresholdedImage = new Mat(grayImage.height(), grayImage.width(), CvType.CV_8UC1);
    Imgproc.adaptiveThreshold(grayImage, thresholdedImage, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, filterBlockSize, subtractedConstant);
    grayImage.release();

    if (invertBlackWhiteColor) {
        final Mat invertedImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC1);
        Core.bitwise_not(thresholdedImage, invertedImage);
        thresholdedImage.release();
        thresholdedImage = invertedImage;
    }

    final Mat coloredImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC3);
    Imgproc.cvtColor(thresholdedImage, coloredImage, Imgproc.COLOR_GRAY2RGB);
    thresholdedImage.release();
    return coloredImage;
}
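
A minimal call-site sketch (parameter values are illustrative; Imgproc.adaptiveThreshold requires an odd filterBlockSize greater than 1):

    // assumes an RGB input Mat named rgbImage
    Mat blackWhite = Utils.tresholdContrastBlackWhite(rgbImage, 11, 7.0, false);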

From source file:at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License:Open Source License

private void oldSingleThread(byte[] pixels) {

    long currentTime = System.currentTimeMillis();

    if (currentTime - mLastTime >= FRAME_TIME_DIFF) {

        synchronized (this) {

            // height * 1.5 rows since the NV21 YUV frame stores a
            // full-resolution Y plane plus a half-resolution interleaved VU plane
            Mat yuv = new Mat((int) (mFrameHeight * 1.5), mFrameWidth, CvType.CV_8UC1);
            yuv.put(0, 0, pixels);

            if (mFrameMat != null)
                mFrameMat.release();

            mFrameMat = new Mat(mFrameHeight, mFrameWidth, CvType.CV_8UC3);
            Imgproc.cvtColor(yuv, mFrameMat, Imgproc.COLOR_YUV2RGB_NV21);

            if (mStoreMat) {
                ChangeDetector.initNewFrameDetector(mFrameMat);
                mStoreMat = false;
            }

            yuv.release();

            mLastTime = currentTime;

            boolean processFrame = true;

            // This is done in series mode:
            if (mAwaitFrameChanges)
                processFrame = isFrameSteadyAndNew();

            // Check if there should be short break between two successive shots in series mode:
            boolean paused = pauseBetweenShots(currentTime);
            processFrame &= !paused;

            // In single mode, or if the frame is steady and contains a change, do the document analysis:
            if (processFrame)
                this.notify();

        }

    }

}

From source file:at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License:Open Source License

public boolean isFrameSame(Mat frame1, Mat frame2) {

    Mat tmp1 = new Mat(frame1.rows(), frame1.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame1, tmp1, Imgproc.COLOR_RGB2GRAY);

    Mat tmp2 = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(frame2, tmp2, Imgproc.COLOR_RGB2GRAY);

    Mat subtractResult = new Mat(frame2.rows(), frame2.cols(), CvType.CV_8UC1);
    // diff the grayscale copies computed above, not the original color frames
    Core.absdiff(tmp1, tmp2, subtractResult);
    Imgproc.threshold(subtractResult, subtractResult, 50, 1, Imgproc.THRESH_BINARY);
    Scalar sumDiff = Core.sumElems(subtractResult);
    double diffRatio = sumDiff.val[0] / (frame1.cols() * frame1.rows());

    return diffRatio < .05;

}
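
A usage sketch (frame names are illustrative; both arguments must be same-sized RGB images): the method reports true when fewer than 5% of pixels differ by more than 50 gray levels.

    boolean unchanged = preview.isFrameSame(previousFrame, currentFrame);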