Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find usage examples for org.opencv.core Mat get.

Prototype

public double[] get(int row, int col) 
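
In Java, get(row, col) returns one double per channel of the element at that position: a CV_8UC3 image yields a 3-element array, a CV_64FC1 matrix a single value. A minimal sketch (class name and fill values are illustrative only):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatGetDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // 2x2, 8-bit, 3-channel matrix filled with the value (10, 20, 30)
        Mat m = new Mat(2, 2, CvType.CV_8UC3, new Scalar(10, 20, 30));
        double[] px = m.get(0, 0); // {10.0, 20.0, 30.0}: one double per channel
        System.out.println(java.util.Arrays.toString(px));
    }
}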

Usage

From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java

License:Open Source License

public static void drawLinesOnImage(Mat lines, Mat image) {
    for (int i = 0; i < lines.rows(); i++) {
        double[] val = lines.get(i, 0); // one line per row: {x1, y1, x2, y2}
        Imgproc.line(image, new Point(val[0], val[1]), new Point(val[2], val[3]), new Scalar(0, 255, 0), 2);
    }
}
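
The rows consumed above typically come from Imgproc.HoughLinesP, which stores one detected segment per row. A sketch of the producing side (gray and image are assumed to be existing 8-bit Mats; the parameter values are illustrative):

Mat edges = new Mat();
Mat lines = new Mat();
Imgproc.Canny(gray, edges, 50, 150);
// rho = 1 px, theta = 1 degree, threshold = 50, minLineLength = 30, maxLineGap = 10
Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 50, 30, 10);
drawLinesOnImage(lines, image); // each lines.get(i, 0) yields {x1, y1, x2, y2}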

From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java

License:Open Source License

public static int findLongestLine(List<Mat> linesList) {
    int longestLine = 0;

    for (Mat lines : linesList) {
        for (int i = 0; i < lines.rows(); i++) {
            int length = lineLength(lines.get(i, 0));
            if (length > longestLine)
                longestLine = length;
        }
    }

    return longestLine;
}

From source file:com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License:Open Source License

private static void adjustXPosOfLines(Mat lines, int xPosAdjustment) {
    if (xPosAdjustment == 0)
        return;

    for (int i = 0; i < lines.rows(); i++) {
        double[] line = lines.get(i, 0); // {x1, y1, x2, y2}
        line[0] += xPosAdjustment; // shift both x coordinates
        line[2] += xPosAdjustment;
        lines.put(i, 0, line); // write the adjusted line back
    }
}

From source file:com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License:Open Source License

public HeroLine(Mat lines) {
    //rect = new android.graphics.Rect();//0, 0, -1, -1);

    if (lines.rows() == 0) {
        isRealLine = false;
    } else {
        isRealLine = true;
        for (int i = 0; i < lines.rows(); i++) {
            double[] val = lines.get(i, 0);
            if (i == 0) {
                initialiseRect(val);
            } else {
                rect.union((int) val[0], (int) val[1]);
                rect.union((int) val[2], (int) val[3]);
            }
        }
        //System.out.println("Created rect with width: " + rect.width());
    }
}

From source file:com.github.rosjava_catkin_package_a.ARLocROS.ARLoc.java

License:Apache License

@Override
public void onStart(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();
    log.info("Reading parameters");
    this.parameter = Parameter.createFromParameterTree(connectedNode.getParameterTree());

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    // setup rotation vector and translation vector storing output of the
    // localization
    rvec = new Mat(3, 1, CvType.CV_64F);
    tvec = new MatOfDouble(1.0, 1.0, 1.0);

    camp = getCameraInfo(connectedNode, parameter);

    // start listening to transform messages on /tf in order to feed the
    // Transformer and look up transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    //Utils.tresholdContrastBlackWhite(image, 600);
                    Imgproc.threshold(image, image, 200, 255, Imgproc.THRESH_BINARY);
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    if (poseProcessor.computePose(rvec, tvec, image)) {
                        // notify publisher threads (pose and tf, see below)
                        synchronized (tvec) {
                            tvec.notifyAll();
                        }
                    }

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    });

    // publish tf CAMERA_FRAME_NAME --> MARKER_FRAME_NAME
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            synchronized (tvec) {
                tvec.wait();
            }

            QuaternionHelper q = new QuaternionHelper();

            /*
             * http://euclideanspace.com/maths/geometry/rotations/
             * conversions/matrixToEuler/index.htm
             * http://stackoverflow.com/questions/12933284/rodrigues-into-
             * eulerangles-and-vice-versa
             * 
             * heading = atan2(-m20,m00) attitude = asin(m10) bank =
             * atan2(-m12,m11)
             */
            // convert output rotation vector rvec to rotation matrix R
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // get rotations around X,Y,Z from rotation matrix R
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            // convert Euler angles to quaternion
            q.setFromEuler(bankX, headingY, attitudeZ);

            // set information to message
            TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
            TransformStamped posestamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = posestamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 point = transform.getTranslation();
            point.setX(tvec.get(0, 0)[0]);
            point.setY(tvec.get(1, 0)[0]);
            point.setZ(tvec.get(2, 0)[0]);

            orientation.setW(q.getW());
            orientation.setX(q.getX());
            orientation.setY(q.getY());
            orientation.setZ(q.getZ());
            posestamped.getHeader().setFrameId(parameter.cameraFrameName());
            posestamped.setChildFrameId(parameter.markerFrameName());
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfmessage.getTransforms().add(posestamped);
            tfPublisherCamToMarker.publish(tfmessage);
        }
    });

    // publish Markers
    final Publisher<visualization_msgs.Marker> markerPublisher = connectedNode.newPublisher("markers",
            visualization_msgs.Marker._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {
            // publish markers every 500ms
            Thread.sleep(500);
            // get marker points from markerConfig, each marker has 4
            // vertices
            List<Point3> points3dlist = markerConfig.getUnordered3DPointList();
            int i = 0;
            for (Point3 p : points3dlist) {
                Marker markermessage = markerPublisher.newMessage();
                // FIXME If the markers are published into an existing frame
                // (e.g. map or odom) the node will consume very high CPU
                // and will fail after a short time. The markers are
                // probably published in the wrong way.
                markermessage.getHeader().setFrameId(parameter.markerFrameName());
                markermessage.setId(i);
                i++;
                markermessage.setType(visualization_msgs.Marker.SPHERE);
                markermessage.setAction(visualization_msgs.Marker.ADD);
                // position
                double x = p.x;
                markermessage.getPose().getPosition().setX(x);
                double y = p.y;
                markermessage.getPose().getPosition().setY(y);
                double z = p.z;
                markermessage.getPose().getPosition().setZ(z);
                // orientation
                markermessage.getPose().getOrientation().setX(0);
                markermessage.getPose().getOrientation().setY(0);
                markermessage.getPose().getOrientation().setZ(0);
                markermessage.getPose().getOrientation().setW(1);
                // pattern size
                markermessage.getScale().setX(0.1);
                markermessage.getScale().setY(0.1);
                markermessage.getScale().setZ(0.1);
                // color
                markermessage.getColor().setA(1);
                markermessage.getColor().setR(1);
                markermessage.getColor().setG(0);
                markermessage.getColor().setB(0);

                markerPublisher.publish(markermessage);
            }
        }
    });

    // publish tf map --> odom
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait to be notified when a new
            // image has been processed
            synchronized (tvec) {
                tvec.wait();
            }

            // compute transform map to odom from map to
            // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

            // map to camera_rgb_optical_frame
            Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            QuaternionHelper q = new QuaternionHelper();
            // get rotation matrix R from solvepnp output rotation vector
            // rvec
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // transpose R, because we need the transformation from
            // world(map) to camera
            R = R.t();
            // get rotation around X,Y,Z from R in radians
            double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            // compute translation vector from world (map) to cam
            // tvec_map_cam
            Core.multiply(R, new Scalar(-1), R); // R=-R
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec

            org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(),
                    q.getY(), q.getZ(), q.getW());
            double x = tvec_map_cam.get(0, 0)[0];
            double y = tvec_map_cam.get(1, 0)[0];
            double z = tvec_map_cam.get(2, 0)[0];
            // create a Transform object that holds the map-to-camera transform
            org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            GraphName targetFrame = GraphName.of("odom");
            org.ros.rosjava_geometry.Transform transform_cam_odom = null;
            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "odom! " + "However, " + "will continue..");
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                        + "However, will " + "continue..");
                // cancel this loop..no result can be computed
                return;
            }
            // multiply results
            org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
            result = result.multiply(transform_map_cam);
            result = result.multiply(transform_cam_odom);

            // set information to ROS message
            TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
            TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
                    .newFromType(geometry_msgs.TransformStamped._TYPE);
            Transform transform = transformStamped.getTransform();

            Quaternion orientation = transform.getRotation();
            Vector3 vector = transform.getTranslation();
            vector.setX(result.getTranslation().getX());
            vector.setY(result.getTranslation().getY());
            vector.setZ(result.getTranslation().getZ());

            orientation.setW(result.getRotationAndScale().getW());
            orientation.setX(result.getRotationAndScale().getX());
            orientation.setY(result.getRotationAndScale().getY());
            orientation.setZ(result.getRotationAndScale().getZ());
            transformStamped.getHeader().setFrameId("map");
            transformStamped.setChildFrameId("odom");
            transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
            // frame_id too
            tfMessage.getTransforms().add(transformStamped);
            tfPublisherMapToOdom.publish(tfMessage);
            // System.exit(0);
        }
    });

    // Publish Pose

    final Publisher<geometry_msgs.PoseStamped> posePublisher = connectedNode
            .newPublisher(parameter.poseTopicName(), geometry_msgs.PoseStamped._TYPE);

    connectedNode.executeCancellableLoop(new CancellableLoop() {

        @Override
        protected void loop() throws InterruptedException {

            // since this is an infinite loop, wait here to be notified when
            // a new image has been processed
            synchronized (tvec) {
                tvec.wait();
            }
            final QuaternionHelper q = new QuaternionHelper();

            // convert rotation vector result of solvepnp to rotation matrix
            Mat R = new Mat(3, 3, CvType.CV_32FC1);
            Calib3d.Rodrigues(rvec, R);
            // see publishers before for documentation
            final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
            R = R.t();
            final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
            final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
            final double attitudeZ = Math.asin(R.get(1, 0)[0]);
            q.setFromEuler(bankX, headingY, attitudeZ);
            Core.multiply(R, new Scalar(-1), R);
            Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
            final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                    q.getX(), q.getY(), q.getZ(), q.getW());
            final double x = tvec_map_cam.get(0, 0)[0];
            final double y = tvec_map_cam.get(1, 0)[0];
            final double z = tvec_map_cam.get(2, 0)[0];

            final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
            final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                    translation, rotation);

            // odom to camera_rgb_optical_frame
            final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
            final GraphName targetFrame = GraphName.of("base_link");
            org.ros.rosjava_geometry.Transform transform_cam_base = null;

            if (transformationService.canTransform(targetFrame, sourceFrame)) {
                try {
                    transform_cam_base = transformationService.lookupTransform(targetFrame, sourceFrame);
                } catch (Exception e) {
                    e.printStackTrace();
                    log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                            + "base_link! " + "However, will continue..");
                    // cancel this loop..no result can be computed
                    return;
                }
            } else {
                log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                        + "base_link!" + " However, " + "will continue..");
                // cancel this loop..no result can be computed
                return;
            }

            // multiply results
            org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform.identity();
            current_pose = current_pose.multiply(transform_map_cam);
            current_pose = current_pose.multiply(transform_cam_base);

            // check the plausibility of the pose: reject it if the implied
            // movement exceeds the robot's defined maximum speed
            if (parameter.badPoseReject()) {
                Time current_timestamp = connectedNode.getCurrentTime();
                // TODO Unfortunately, we do not have the tf timestamp at
                // hand here. So we can only use the current timestamp.
                double maxspeed = 5;
                boolean goodpose = false;
                // if (current_pose != null && current_timestamp != null) {
                if (last_pose != null && last_timestamp != null) {
                    // check speed of movement between last and current pose
                    double distance = PoseCompare.distance(current_pose, last_pose);
                    double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                    if ((distance / timedelta) < maxspeed) {
                        if (smoothing) {
                            double xold = last_pose.getTranslation().getX();
                            double yold = last_pose.getTranslation().getY();
                            double zold = last_pose.getTranslation().getZ();
                            double xnew = current_pose.getTranslation().getX();
                            double ynew = current_pose.getTranslation().getY();
                            double znew = current_pose.getTranslation().getZ();
                            final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                    (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3, (zold * 2 + znew) / 3);
                            current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                    current_pose.getRotationAndScale());
                            last_pose = current_pose;
                        }
                        last_pose = current_pose;
                        last_timestamp = current_timestamp;
                        goodpose = true;
                    } else {
                        log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                    }

                } else {
                    last_pose = current_pose;
                    last_timestamp = current_timestamp;
                }
                // }
                // bad pose rejection
                if (!goodpose) {
                    return;
                }
            }

            // set information to message
            geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
            Pose pose = posestamped.getPose();
            Quaternion orientation = pose.getOrientation();
            Point point = pose.getPosition();

            point.setX(current_pose.getTranslation().getX());

            point.setY(current_pose.getTranslation().getY());

            point.setZ(current_pose.getTranslation().getZ());

            orientation.setW(current_pose.getRotationAndScale().getW());
            orientation.setX(current_pose.getRotationAndScale().getX());
            orientation.setY(current_pose.getRotationAndScale().getY());
            orientation.setZ(current_pose.getRotationAndScale().getZ());

            // frame_id too
            posestamped.getHeader().setFrameId("map");
            posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
            posePublisher.publish(posestamped);

        }
    });

}

From source file:com.github.rosjava_catkin_package_a.ARLocROS.Utils.java

License:Apache License

static public void tresholdContrastBlackWhite(Mat image2, double d) {
    int width = image2.width();
    int height = image2.height();
    for (int i = 0; i < width; i++)
        for (int j = 0; j < height; j++) {
            double[] rgb = image2.get(j, i);
            double[] rgbnew = new double[rgb.length];
            if (rgb[0] + rgb[1] + rgb[2] < d)
                rgbnew[0] = rgbnew[1] = rgbnew[2] = 0.0;
            else
                rgbnew[0] = rgbnew[1] = rgbnew[2] = 255.0;
            image2.put(j, i, rgbnew);
        }
}
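
Calling get and put once per pixel crosses the Java/native boundary width * height times, which is slow on large images. A sketch of the same threshold using the bulk byte[] overloads of get and put instead (assuming image2 is CV_8UC3; the method name is illustrative):

static public void tresholdContrastBlackWhiteFast(Mat image2, double d) {
    byte[] buf = new byte[(int) (image2.total() * image2.channels())];
    image2.get(0, 0, buf); // copy the whole image out in one call
    for (int p = 0; p < buf.length; p += 3) {
        int sum = (buf[p] & 0xFF) + (buf[p + 1] & 0xFF) + (buf[p + 2] & 0xFF);
        byte v = (byte) (sum < d ? 0 : 255);
        buf[p] = buf[p + 1] = buf[p + 2] = v;
    }
    image2.put(0, 0, buf); // and write it back in one call
}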

From source file:com.joravasal.keyface.EigenFacesActivity.java

License:Open Source License

/**
 * Normalizes a matrix with arbitrary values into the range 0-255 (both included) so it can be shown as an image.
 * @param mat The matrix to convert
 * @return A matrix that can be used as an image
 */
private Mat toGrayscale(Mat mat) {
    Mat res = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC1);
    double min, max;
    MinMaxLocResult minmax = Core.minMaxLoc(mat);
    min = minmax.minVal;
    max = minmax.maxVal;
    for (int row = 0; row < mat.rows(); row++) {
        for (int col = 0; col < mat.cols(); col++) {
            // linear min-max scaling; note this produces NaN when max == min
            res.put(row, col, 255 * ((mat.get(row, col)[0] - min) / (max - min)));
        }
    }
    return res;
}
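
The same normalization is available as a single native call via Core.normalize, which also avoids the NaN the loop above produces when max == min. A sketch (the method name is illustrative):

private Mat toGrayscaleNormalize(Mat mat) {
    Mat res = new Mat();
    // scale the min..max range of mat linearly to 0..255 and convert to 8-bit
    Core.normalize(mat, res, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
    return res;
}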

From source file:com.joravasal.keyface.PCAfaceRecog.java

License:Open Source License

/**
 * Finds the closest image and its distance, the second closest image and its distance,
 * and the farthest image with its distance.
 *
 * The result can differ from the closest image: if the closest image is not within the
 * threshold, the result is -1 while the closest image is still reported. Otherwise the
 * two values are equal.
 */
private AlgorithmReturnValue findClosest(Mat toCompare) {

    AlgorithmReturnValue result = new AlgorithmReturnValue();

    for (int i = 0; i < numImages; i++) {
        double dist = 0;
        for (int j = 0; j < numImages; j++) {
            if (Integer.parseInt(KeyFaceActivity.prefs.getString("distAlgPref", "1")) == 1) { //Case is 1 -> Euclidean distance
                dist += DistanceAlgorithm.euclideanDist(toCompare.get(0, j)[0], projectedTraining.get(i, j)[0]);
            } else { //Case is 0 -> Rectilinear Distance
                dist += DistanceAlgorithm.rectilinearDist(toCompare.get(0, j)[0],
                        projectedTraining.get(i, j)[0]);
            }
        }
        if (dist < result.getDistClosestImage()) {
            result.setDistSecondClosestImage(result.getDistClosestImage());
            result.setDistClosestImage(dist);
            result.setSecondClosestImage(result.getClosestImage());
            result.setClosestImage(i);
        } else if (dist < result.getDistSecondClosestImage()) {
            result.setDistSecondClosestImage(dist);
            result.setSecondClosestImage(i);
        }
        if (dist > result.getDistFarthestImage()) {
            result.setDistFarthestImage(dist);
            result.setFarthestImage(i);
        }
    }

    //The threshold is the preferences value multiplied by 10^5
    result.setThreshold(Double.parseDouble(KeyFaceActivity.prefs.getString("threshold", "50")) * 100000.0);

    if (result.getDistClosestImage() < result.getThreshold())
        result.setResult(result.getClosestImage());
    return result;
}

From source file:com.mycompany.linedetection.LineDetector.java

public void findLines() {
    Imgproc.Canny(img, edgeDetectedImg, 100, 200, 3, true);
    Mat lines = new Mat();

    int width = img.width();
    int height = img.height();
    double diagonal = Math.sqrt(width * width + height * height);
    int minOfWidthHeight = (width < height) ? width : height;

    Imgproc.HoughLinesP(edgeDetectedImg, lines, 1, Math.PI / 180, minOfWidthHeight * 10 / 100,
            diagonal * 25 / 100, diagonal * 4 / 100);

    int firstN = (lines.rows() < 5) ? lines.rows() : 5;

    for (int x = 0; x < lines.rows(); x++) {
        double[] vec = lines.get(x, 0); // one segment per row: {x1, y1, x2, y2}
        double x1 = vec[0], y1 = vec[1], x2 = vec[2], y2 = vec[3];
        Point startPoint = new Point(x1, y1);
        Point endPoint = new Point(x2, y2);

        double angle_inv = horizontalLine.getAngle(new Line(x1, y1, x2, y2));
        double angle = horizontalLine.getAngle(new Line(x2, y2, x1, y1));
        if ((angle >= diagAngle1 - DIAGONAL_TRESHOLD && angle <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle >= diagAngle2 - DIAGONAL_TRESHOLD && angle <= diagAngle2 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle1 - DIAGONAL_TRESHOLD && angle_inv <= diagAngle1 + DIAGONAL_TRESHOLD)
                || (angle_inv >= diagAngle2 - DIAGONAL_TRESHOLD
                        && angle_inv <= diagAngle2 + DIAGONAL_TRESHOLD)) {
            diagonalLineList.add(new Line(x1, y1, x2, y2));
            Imgproc.line(img, startPoint, endPoint, new Scalar(255, 255, 0), 4);
        } else {
            lineList.add(new Line(x1, y1, x2, y2));
        }

    }

    Collections.sort(lineList, new Comparator<Line>() {
        @Override
        public int compare(Line l1, Line l2) {
            return (int) (l2.getLength() - l1.getLength());
        }

    });

    ArrayList<Line> arr = new ArrayList<Line>();

    // keep and highlight only the firstN + 1 longest lines (skip if there are not enough)
    if (lineList.size() >= firstN + 1) {
        for (int i = 0; i < firstN + 1; i++) {
            double x1 = lineList.get(i).getX1(), y1 = lineList.get(i).getY1(), x2 = lineList.get(i).getX2(),
                    y2 = lineList.get(i).getY2();
            Point startPoint = new Point(x1, y1);
            Point endPoint = new Point(x2, y2);
            arr.add(lineList.get(i));
            Imgproc.line(img, startPoint, endPoint, new Scalar(0, 0, 255), 3);
        }
    }
    lineList = arr;
}

From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardDetect.java

License:Open Source License

/**
 * Computes the four corner points of the board as the intersections of the detected lines.
 * @param lineEq line equations (ax+by=1), indexed as [angle][section]
 * @param points ArrayList that receives the computed intersection points
 * @param img image to draw the corner points on; may be null to skip drawing
 * @return true: success, false: failure
 */
private boolean calcSquare(StraightLineEquation lineEq[][], ArrayList<Point> points, Mat img) {
    // 2x2 coefficient matrix for a pair of line equations (ax+by=1)
    Mat mat = new Mat(2, 2, CvType.CV_32F);
    mPointCenterX = 0.0f;
    mPointCenterY = 0.0f;
    int counter = 0;
    for (int ang0sec = 0; ang0sec < 2; ang0sec++) {
        mat.put(0, 0, lineEq[0][ang0sec].a);
        mat.put(0, 1, lineEq[0][ang0sec].b);
        for (int ang1sec = 0; ang1sec < 2; ang1sec++) {
            mat.put(1, 0, lineEq[1][ang1sec].a);
            mat.put(1, 1, lineEq[1][ang1sec].b);
            Mat matAns;
            try {
                matAns = mat.inv();
                if (matAns == null)
                    return false;
            } catch (Exception e) { // inversion fails when the two lines are (nearly) parallel
                e.printStackTrace();
                return false;
            }
            // intersection point = mat.inv() * (1, 1)^T, shifted by the image center
            float x = (float) (matAns.get(0, 0)[0] + matAns.get(0, 1)[0] + mCenterX);
            float y = (float) (matAns.get(1, 0)[0] + matAns.get(1, 1)[0] + mCenterY);
            Point p = new Point(x, y);
            points.add(p);
            mPointCenterX += x;
            mPointCenterY += y;
            counter++;
        }
    }
    mPointCenterX /= (float) counter;
    mPointCenterY /= (float) counter;
    // sort the four corner points into a consistent order
    Collections.sort(points, new PointComparator());
    if (img != null) {
        Scalar color[] = new Scalar[4];
        color[0] = new Scalar(0xff, 0x00, 0x00);
        color[1] = new Scalar(0x00, 0xff, 0x00);
        color[2] = new Scalar(0x00, 0x00, 0xff);
        color[3] = new Scalar(0xff, 0x00, 0xff);

        for (int i = 0; i < 4; i++) {
            Core.circle(img, points.get(i), 30, color[i], 5);
        }
    }
    if (MyDebug.DEBUG) {
        for (int i = 0; i < 4; i++) {
            Log.d(LOG_TAG, "point(" + i + ") = " + points.get(i).x + ":" + points.get(i).y);
        }
    }

    return true;
}