Example usage for org.opencv.core Mat release

Introduction

On this page you can find example usages of org.opencv.core.Mat.release().

Prototype

public void release() 
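
A Mat wraps memory that OpenCV allocates natively, outside the Java heap, so the garbage collector only sees the small Java wrapper object. Calling release() decrements the reference count of the native buffer and frees it once no other Mat shares it, instead of waiting for finalization. Below is a minimal, self-contained sketch (not taken from the source files that follow; the image size is arbitrary):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class ReleaseExample {
    public static void main(String[] args) {
        // load the OpenCV native library before creating any Mat
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        final Mat image = new Mat(480, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
        final Mat gray = new Mat();
        try {
            Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
            // ... work with gray ...
        } finally {
            // free the native buffers deterministically
            gray.release();
            image.release();
        }
    }
}

As the examples below show, the common pattern is to release every intermediate Mat as soon as its contents have been consumed, including on early-return paths.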

Usage

From source file: arlocros.ArMarkerPoseEstimator.java

License: Apache License

private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);

    logger.info("My instance id is " + parameter.instanceId());
    if (heartbeatMonitor != null) {
        logger.info("Start waiting for arlocros id: " + (parameter.instanceId() - 1));
        while (true) {
            final Time currentTime = connectedNode.getCurrentTime();
            final Time lastHeartbeatTime = heartbeatMonitor.getLastTimeReceivedMessage();
            if (lastHeartbeatTime != null) {
                final Duration duration = currentTime.subtract(lastHeartbeatTime);
                if (duration.totalNsecs() > 3.0E8) {
                    logger.info("Not received any heartbeat for 300ms. Start running.");
                    break;
                }
            }
        }
    }

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Image encoding " + message.getEncoding() + " is not supported. Exiting.");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    final Mat image = Utils.matFromImage(message);
                    // adaptively threshold the image to black/white to increase contrast
                    final Mat thresholdedImage = Utils.tresholdContrastBlackWhite(image,
                            parameter.filterBlockSize(), parameter.subtractedConstant(),
                            parameter.invertBlackWhiteColor());
                    image.release();
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    final Mat rvec = new Mat(3, 1, CvType.CV_64F);
                    final MatOfDouble tvec = new MatOfDouble(1.0, 1.0, 1.0);
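                    // rvec and tvec are outputs of solvePnP; they wrap native memory and
                    // are released on every exit path below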
                    final boolean hasPose = poseProcessor.computePose(rvec, tvec, thresholdedImage);
                    thresholdedImage.release();

                    if (!hasPose) {
                        // no pose found; free the solvePnP output Mats before returning
                        rvec.release();
                        tvec.release();
                        return;
                    }

                    // publish pose
                    final QuaternionHelper q = new QuaternionHelper();

                    // convert the solvePnP rotation vector result to a rotation matrix
                    Mat R = new Mat(3, 3, CvType.CV_32FC1);
                    Calib3d.Rodrigues(rvec, R);
                    // see the publish methods below for documentation of this computation
                    final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
                    R = R.t();
                    final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
                    final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
                    final double attitudeZ = Math.asin(R.get(1, 0)[0]);
                    q.setFromEuler(bankX, headingY, attitudeZ);
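                    // compute the camera position in the map frame:
                    // tvec_map_cam = -R^T * tvec (R as returned by Rodrigues)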
                    Core.multiply(R, new Scalar(-1), R);
                    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
                    R.release();
                    final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                            q.getX(), q.getY(), q.getZ(), q.getW());
                    final double x = tvec_map_cam.get(0, 0)[0];
                    final double y = tvec_map_cam.get(1, 0)[0];
                    final double z = tvec_map_cam.get(2, 0)[0];
                    tvec_map_cam.release();

                    final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x,
                            y, z);
                    final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                            translation, rotation);

                    // odom to camera_rgb_optical_frame
                    final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
                    final GraphName targetFrame = GraphName.of("base_link");
                    org.ros.rosjava_geometry.Transform transform_cam_base = null;

                    if (transformationService.canTransform(targetFrame, sourceFrame)) {
                        try {
                            transform_cam_base = transformationService.lookupTransform(targetFrame,
                                    sourceFrame);
                        } catch (Exception e) {
                            log.error(ExceptionUtils.getStackTrace(e));
                            log.info("Could not get transformation from " + parameter.cameraFrameName()
                                    + " to base_link! However, will continue..");
                            // cancel this frame: no result can be computed
                            rvec.release();
                            tvec.release();
                            return;
                        }
                    } else {
                        log.info("Could not get transformation from " + parameter.cameraFrameName()
                                + " to base_link! However, will continue..");
                        // cancel this frame: no result can be computed
                        rvec.release();
                        tvec.release();
                        return;
                    }

                    // multiply results
                    org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform
                            .identity();
                    current_pose = current_pose.multiply(transform_map_cam);
                    current_pose = current_pose.multiply(transform_cam_base);

                    if (current_pose.getTranslation().getZ() < 0.5) {
                        // discard this frame; release the solvePnP outputs first
                        rvec.release();
                        tvec.release();
                        return;
                    }

                    // check for plausibility of the pose by checking if movement
                    // exceeds max speed (defined) of the robot
                    if (parameter.badPoseReject()) {
                        Time current_timestamp = connectedNode.getCurrentTime();
                        // TODO Unfortunately, we do not have the tf timestamp at
                        // hand here. So we can only use the current timestamp.
                        double maxspeed = 5;
                        boolean goodpose = false;
                        if ((last_pose != null && last_timestamp != null)
                                && !Double.isNaN(last_pose.getTranslation().getX())) {
                            // check speed of movement between last and current pose
                            double distance = PoseCompare.distance(current_pose, last_pose);
                            double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                            if ((distance / timedelta) < maxspeed) {
                                if (smoothing) {
                                    double xold = last_pose.getTranslation().getX();
                                    double yold = last_pose.getTranslation().getY();
                                    double zold = last_pose.getTranslation().getZ();
                                    double xnew = current_pose.getTranslation().getX();
                                    double ynew = current_pose.getTranslation().getY();
                                    double znew = current_pose.getTranslation().getZ();
                                    final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3,
                                            (zold * 2 + znew) / 3);
                                    current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                            current_pose.getRotationAndScale());
                                }
                                last_pose = current_pose;
                                last_timestamp = current_timestamp;
                                goodpose = true;
                            } else {
                                log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                                log.info("current pose: " + current_pose.getTranslation().getX() + " "
                                        + current_pose.getTranslation().getY() + " "
                                        + current_pose.getTranslation().getZ());
                                log.info("last pose: " + last_pose.getTranslation().getX() + " "
                                        + last_pose.getTranslation().getY() + " "
                                        + last_pose.getTranslation().getZ());
                            }

                        } else {
                            last_pose = current_pose;
                            last_timestamp = current_timestamp;
                        }
                        // bad pose rejection
                        if (!goodpose) {
                            rvec.release();
                            tvec.release();
                            return;
                        }
                    }

                    // set information to message
                    final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
                    Pose pose = posestamped.getPose();
                    Quaternion orientation = pose.getOrientation();
                    Point point = pose.getPosition();

                    point.setX(current_pose.getTranslation().getX());

                    point.setY(current_pose.getTranslation().getY());

                    point.setZ(current_pose.getTranslation().getZ());

                    orientation.setW(current_pose.getRotationAndScale().getW());
                    orientation.setX(current_pose.getRotationAndScale().getX());
                    orientation.setY(current_pose.getRotationAndScale().getY());
                    orientation.setZ(current_pose.getRotationAndScale().getZ());

                    // frame_id too
                    posestamped.getHeader().setFrameId("map");
                    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
                    posePublisher.publish(posestamped);
                    mostRecentPose.set(posestamped);

                    //                publishCamFrameToMarkerFrame(rvec, tvec, tfPublisherCamToMarker, connectedNode);
                    //                publishMapToOdom(
                    //                    rvec, tvec, transformationService, tfPublisherMapToOdom, connectedNode);
                    rvec.release();
                    tvec.release();

                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });
}

From source file: arlocros.ArMarkerPoseEstimator.java

License: Apache License

private void publishCamFrameToMarkerFrame(Mat rvec, Mat tvec,
        Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker, ConnectedNode connectedNode) {
    QuaternionHelper q = new QuaternionHelper();

    /*
     * http://euclideanspace.com/maths/geometry/rotations/
     * conversions/matrixToEuler/index.htm
     * http://stackoverflow.com/questions/12933284/rodrigues-into-
     * eulerangles-and-vice-versa
     *
     * heading = atan2(-m20,m00) attitude = asin(m10) bank =
     * atan2(-m12,m11)
     */
    // convert output rotation vector rvec to rotation matrix R
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // get rotations around X,Y,Z from rotation matrix R
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    R.release();
    // convert Euler angles to quaternion
    q.setFromEuler(bankX, headingY, attitudeZ);

    // set information to message
    TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
    TransformStamped posestamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = posestamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 point = transform.getTranslation();
    point.setX(tvec.get(0, 0)[0]);
    point.setY(tvec.get(1, 0)[0]);
    point.setZ(tvec.get(2, 0)[0]);

    orientation.setW(q.getW());
    orientation.setX(q.getX());
    orientation.setY(q.getY());
    orientation.setZ(q.getZ());
    posestamped.getHeader().setFrameId(parameter.cameraFrameName());
    posestamped.setChildFrameId(parameter.markerFrameName());
    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
    tfmessage.getTransforms().add(posestamped);
    tfPublisherCamToMarker.publish(tfmessage);
}

From source file: arlocros.ArMarkerPoseEstimator.java

License: Apache License

private void publishMapToOdom(Mat rvec, Mat tvec, TransformationService transformationService,
        Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom, ConnectedNode connectedNode) {
    // compute transform map to odom from map to
    // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

    // map to camera_rgb_optical_frame
    Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
    QuaternionHelper q = new QuaternionHelper();
    // get rotation matrix R from the solvePnP output rotation vector rvec
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // transpose R, because we need the transformation from
    // world(map) to camera
    R = R.t();
    // get rotation around X,Y,Z from R in radians
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    q.setFromEuler(bankX, headingY, attitudeZ);
    // compute translation vector from world (map) to cam:
    // tvec_map_cam = -R^T * tvec (R as returned by Rodrigues)
    Core.multiply(R, new Scalar(-1), R); // R = -R (R was transposed above)
    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam = R * tvec
    R.release();

    org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(), q.getY(),
            q.getZ(), q.getW());
    double x = tvec_map_cam.get(0, 0)[0];
    double y = tvec_map_cam.get(1, 0)[0];
    double z = tvec_map_cam.get(2, 0)[0];
    tvec_map_cam.release();
    // create a Transform object that holds the transform from map to cam
    org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
    org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(translation,
            rotation);

    // odom to camera_rgb_optical_frame
    GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
    GraphName targetFrame = GraphName.of("odom");
    org.ros.rosjava_geometry.Transform transform_cam_odom = null;
    if (transformationService.canTransform(targetFrame, sourceFrame)) {
        try {
            transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
        } catch (Exception e) {
            log.error(ExceptionUtils.getStackTrace(e));
            log.info("Could not get transformation from " + parameter.cameraFrameName()
                    + " to odom! However, will continue..");
            return;
        }
    } else {
        log.info("Could not get transformation from " + parameter.cameraFrameName()
                + " to odom! However, will continue..");
        // cancel this frame: no result can be computed
        return;
    }
    }
    // multiply results
    org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
    result = result.multiply(transform_map_cam);
    result = result.multiply(transform_cam_odom);

    // set information to ROS message
    TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
    TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = transformStamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 vector = transform.getTranslation();
    vector.setX(result.getTranslation().getX());
    vector.setY(result.getTranslation().getY());
    vector.setZ(result.getTranslation().getZ());

    orientation.setW(result.getRotationAndScale().getW());
    orientation.setX(result.getRotationAndScale().getX());
    orientation.setY(result.getRotationAndScale().getY());
    orientation.setZ(result.getRotationAndScale().getZ());
    transformStamped.getHeader().setFrameId("map");
    transformStamped.setChildFrameId("odom");
    transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
    tfMessage.getTransforms().add(transformStamped);
    tfPublisherMapToOdom.publish(tfMessage);
}

From source file: arlocros.ComputePose.java

License: Apache License

public boolean computePose(Mat rvec, Mat tvec, Mat image2) throws NyARException, FileNotFoundException {
    // convert image to NyAR style for processing
    final INyARRgbRaster imageRaster = NyARImageHelper.createFromMat(image2);

    // create new marker system configuration
    i_config = new NyARMarkerSystemConfig(i_param);
    markerSystemState = new NyARMarkerSystem(i_config);
    // Create wrapper that passes cam pictures to marker system
    cameraSensorWrapper = new NyARSensor(i_screen_size);
    ids = new int[markerPatterns.size()];
    patternmap = new HashMap<>();
    for (int i = 0; i < markerPatterns.size(); i++) {
        // create marker description from pattern file and add to marker
        // system
        ids[i] = markerSystemState.addARMarker(arCodes.get(i), 25, markerConfig.getMarkerSize());
        patternmap.put(ids[i], markerPatterns.get(i));
    }

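    // feed the camera image into the marker system and run marker detection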
    cameraSensorWrapper.update(imageRaster);
    markerSystemState.update(cameraSensorWrapper);

    // init 3D point list
    final List<Point3> points3dlist = new ArrayList<>();
    final List<Point> points2dlist = new ArrayList<>();

    for (final int id : ids) {
        // process only if this marker has been detected
        if (markerSystemState.isExistMarker(id) && markerSystemState.getConfidence(id) > 0.7) {
            // read and add 2D points
            final NyARIntPoint2d[] vertex2d = markerSystemState.getMarkerVertex2D(id);
            Point p = new Point(vertex2d[0].x, vertex2d[0].y);
            points2dlist.add(p);
            p = new Point(vertex2d[1].x, vertex2d[1].y);
            points2dlist.add(p);
            p = new Point(vertex2d[2].x, vertex2d[2].y);
            points2dlist.add(p);
            p = new Point(vertex2d[3].x, vertex2d[3].y);
            points2dlist.add(p);

            //            final MatOfPoint mop = new MatOfPoint();
            //            mop.fromList(points2dlist);
            //            final List<MatOfPoint> pts = new ArrayList<>();
            //            pts.add(mop);

            // read and add corresponding 3D points
            points3dlist.addAll(markerConfig.create3dpointlist(patternmap.get(id)));

            if (visualization) {
                // draw red rectangle around detected marker
                Core.rectangle(image2, new Point(vertex2d[0].x, vertex2d[0].y),
                        new Point(vertex2d[2].x, vertex2d[2].y), new Scalar(0, 0, 255));
                final String markerFile = patternmap.get(id).replaceAll(".*4x4_", "").replace(".patt", "");
                Core.putText(image2, markerFile,
                        new Point((vertex2d[2].x + vertex2d[0].x) / 2.0, vertex2d[0].y - 5), 4, 1,
                        new Scalar(250, 0, 0));
            }
        }

    }
    // load 2D and 3D points to Mats for solvePNP
    final MatOfPoint3f objectPoints = new MatOfPoint3f();
    objectPoints.fromList(points3dlist);
    final MatOfPoint2f imagePoints = new MatOfPoint2f();
    imagePoints.fromList(points2dlist);

    if (visualization) {
        // show image with markers detected
        Imshow.show(image2);
    }

    // do not call solvePnP with empty input data (no markers detected)
    if (points2dlist.size() == 0) {
        objectPoints.release();
        imagePoints.release();
        return false;
    }

    // RANSAC-based pose estimation: more robust against outlier points, but the
    // resulting pose can shake more
    Mat inliers = new Mat();

    Calib3d.solvePnPRansac(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec, false, 300, 5, 16,
            inliers, Calib3d.CV_P3P);
    ArMarkerPoseEstimator.getLog()
            .info("Points detected: " + points2dlist.size() + " inliers: " + inliers.size());

    objectPoints.release();
    imagePoints.release();

    // avoid publishing a zero pose if localization failed
    if (inliers.rows() == 0) {
        inliers.release();
        return false;
    }

    inliers.release();
    return true;
}

From source file: arlocros.Utils.java

License: Apache License

static public Mat tresholdContrastBlackWhite(Mat srcImage, int filterBlockSize, double subtractedConstant,
        boolean invertBlackWhiteColor) {

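    // a 1x3 transform that averages the three color channels into a single gray channel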
    final Mat transformMat = new Mat(1, 3, CvType.CV_32F);
    final int row = 0;
    final int col = 0;
    transformMat.put(row, col, 0.33, 0.33, 0.34);

    final Mat grayImage = new Mat(srcImage.height(), srcImage.width(), CvType.CV_8UC1);
    Core.transform(srcImage, grayImage, transformMat);

    Mat thresholdedImage = new Mat(grayImage.height(), grayImage.width(), CvType.CV_8UC1);
    Imgproc.adaptiveThreshold(grayImage, thresholdedImage, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C,
            Imgproc.THRESH_BINARY, filterBlockSize, subtractedConstant);
    grayImage.release();

    if (invertBlackWhiteColor) {
        final Mat invertedImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC1);
        Core.bitwise_not(thresholdedImage, invertedImage);
        thresholdedImage.release();
        thresholdedImage = invertedImage;
    }

    final Mat coloredImage = new Mat(thresholdedImage.height(), thresholdedImage.width(), CvType.CV_8UC3);
    Imgproc.cvtColor(thresholdedImage, coloredImage, Imgproc.COLOR_GRAY2RGB);
    thresholdedImage.release();
    return coloredImage;
}

From source file: at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License: Open Source License

private void oldSingleThread(byte[] pixels) {

    long currentTime = System.currentTimeMillis();

    if (currentTime - mLastTime >= FRAME_TIME_DIFF) {

        synchronized (this) {

            // NV21 frame: full-resolution Y plane plus half-resolution interleaved UV,
            // hence height * 1.5
            Mat yuv = new Mat((int) (mFrameHeight * 1.5), mFrameWidth, CvType.CV_8UC1);
            yuv.put(0, 0, pixels);

            if (mFrameMat != null)
                mFrameMat.release();

            mFrameMat = new Mat(mFrameHeight, mFrameWidth, CvType.CV_8UC3);
            Imgproc.cvtColor(yuv, mFrameMat, Imgproc.COLOR_YUV2RGB_NV21);

            if (mStoreMat) {
                ChangeDetector.initNewFrameDetector(mFrameMat);
                mStoreMat = false;
            }

            yuv.release();

            mLastTime = currentTime;

            boolean processFrame = true;

            // This is done in series mode:
            if (mAwaitFrameChanges)
                processFrame = isFrameSteadyAndNew();

            // Check if there should be short break between two successive shots in series mode:
            boolean paused = pauseBetweenShots(currentTime);
            processFrame &= !paused;

            // In single mode, or if the frame is steady and contains a change, do the document analysis:
            if (processFrame)
                this.notify();

        }

    }

}

From source file: at.ac.tuwien.caa.docscan.camera.CameraPreview.java

License: Open Source License

private void performCVTasks(byte[] pixels) {

    long currentTime = System.currentTimeMillis();

    if (currentTime - mLastTime >= FRAME_TIME_DIFF) {

        Mat mat = byte2Mat(pixels);

        if (mStoreMat) {
            ChangeDetector.initMovementDetector(mat);
            ChangeDetector.initNewFrameDetector(mat);
            mStoreMat = false;
        } else if (!ChangeDetector.isMovementDetectorInitialized()) {
            ChangeDetector.initMovementDetector(mat);
        }

        boolean processFrame = true;
        // In series mode the document analysis is only performed if no movement occurred:
        if (mAwaitFrameChanges) {
            if (!mCVThreadManager.isRunning(CVThreadManager.TASK_CHANGE)) {
                ChangeCallable cCallable = new ChangeCallable(mat.clone(), mCVCallback, mTimerCallbacks);
                mCVThreadManager.addCallable(cCallable, CVThreadManager.TASK_CHANGE);
                processFrame = mCVThreadManager.isFrameSteadyAndNew();
            }
        }

        if (processFrame) {

            mTimerCallbacks.onTimerStarted(TaskTimer.TaskType.MOVEMENT_CHECK);
            Mat mat2 = mat.clone();
            mTimerCallbacks.onTimerStopped(TaskTimer.TaskType.MOVEMENT_CHECK);
            mat2.release(); // the clone is otherwise unused; release its native buffer

            if (!mCVThreadManager.isRunning(CVThreadManager.TASK_PAGE)) {
                PageCallable pCallable = new PageCallable(mat.clone(), mCVCallback, mTimerCallbacks, mFrameCnt);
                mCVThreadManager.addCallable(pCallable, CVThreadManager.TASK_PAGE);
            }

            if (mMeasureFocus && !mCVThreadManager.isRunning(CVThreadManager.TASK_FOCUS)) {
                FocusCallable fCallable = new FocusCallable(mat.clone(), mCVCallback, mTimerCallbacks);
                mCVThreadManager.addCallable(fCallable, CVThreadManager.TASK_FOCUS);
            }

            if (mAwaitFrameChanges)
                mVerificationTime = System.currentTimeMillis();

        }

        mat.release();

        mLastTime = currentTime;

    }

}

From source file: at.uniklu.itec.videosummary.Summarize.java

License: GNU General Public License

private double getImageSharpness(File frame) {
    Mat img = Highgui.imread(frame.getAbsolutePath(), 0);
    Mat dx, dy;
    dx = new Mat();
    dy = new Mat();
    Imgproc.Sobel(img, dx, CvType.CV_32F, 1, 0);
    Imgproc.Sobel(img, dy, CvType.CV_32F, 0, 1);
    Core.magnitude(dx, dy, dx);
    Scalar sum = Core.sumElems(dx);
    img.release();
    dx.release();
    dy.release();
    // hint the VM to collect the released wrappers
    System.gc();
    //System.out.println("Sum of gradients= "+sum);
    return (sum.val[0]);
}

From source file: com.astrocytes.core.operationsengine.OperationsImpl.java

License: Open Source License

@Override
public Mat applyMathMorphology(Integer radius) {
    Mat dest = new Mat();
    int instrumentSize = radius * 2 + 1;
    Mat kernel = getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(instrumentSize, instrumentSize),
            new Point(radius, radius));

    Imgproc.morphologyEx(currentImage, dest, MORPH_CLOSE, kernel, new Point(-1, -1), 1);

    dest.copyTo(currentImage);
    dest.release();
    return currentImage;
}

From source file: com.astrocytes.core.operationsengine.OperationsImpl.java

License: Open Source License

private Mat applyRayCastingSegmentation() {
    //Mat cannyEdges = CoreOperations.cannyFilter(sourceImage, 26, 58);
    Mat contours = new Mat(preparedImage.rows(), preparedImage.cols(), CvType.CV_32S);
    int contoursCount = /*neurons.size();*/ CoreOperations
            .drawAllContours(CoreOperations.erode(preparedImage, 5), contours);
    Mat result = new Mat(preparedImage.rows(), preparedImage.cols(), preparedImage.type());//CoreOperations.or(CoreOperations.and(cannyEdges, CoreOperations.grayscale(preparedImage)), contours);
    //cannyEdges.release();

    //Mat markers = new Mat(contours.rows(), contours.cols(), CvType.CV_32S);
    //contours.copyTo(markers);
    contours.convertTo(contours, CvType.CV_32S);

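    // seed a filled circle at each detected neuron center into the marker image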
    for (Neuron neuron : neurons) {
        int x = (int) neuron.getCenter().x;
        int y = (int) neuron.getCenter().y;
        int color = (int) preparedImage.get(y, x)[0];
        /*contours.put(y, x, color);
        contours.put(y - 2, x, color);
        contours.put(y + 2, x, color);
        contours.put(y, x - 2, color);
        contours.put(y, x + 2, color);*/
        Imgproc.circle(contours, neuron.getCenter(), (int) (0.4f * neuron.getRadius()), new Scalar(color), -1);
    }

    Imgproc.watershed(sourceImage, contours);

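    // watershed labels region boundaries with -1; map the labeled pixels into result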
    for (int i = 0; i < contours.rows(); i++) {
        for (int j = 0; j < contours.cols(); j++) {
            int index = (int) contours.get(i, j)[0];
            if (index == -1) {
                result.put(i, j, 0, 0, 0);
            } else if (index <= 0 || index > contoursCount) {
                result.put(i, j, 0, 0, 0);
            } else {
                if (index == 255) {
                    result.put(i, j, 0, 0, 0/*sourceImage.get(i, j)*/);
                } else {
                    result.put(i, j, index, index, index);
                }
            }
        }
    }

    result = CoreOperations.erode(result, 2);
    result = CoreOperations.dilate(result, 3);

    contours.release();

    contours = sourceImage.clone();
    CoreOperations.drawAllContours(result, contours);

    return contours;
}