Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find example usages of org.opencv.core Mat.get.

Prototype

public double[] get(int row, int col) 
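
Mat.get(row, col) returns the values of all channels at the given element as a double[] (for example, three values for a BGR image), and returns null when the indices fall outside the matrix (as the TelaPrincipal example below checks for). A minimal, self-contained sketch, assuming OpenCV 3.x (where image loading lives in Imgcodecs) and a placeholder image path:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

import java.util.Arrays;

public class MatGetExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // "input.jpg" is a placeholder; substitute any readable image file
        Mat image = Imgcodecs.imread("input.jpg");
        if (!image.empty()) {
            // one value per channel (B, G, R for a standard colour image)
            double[] pixel = image.get(image.rows() / 2, image.cols() / 2);
            System.out.println("Centre pixel: " + Arrays.toString(pixel));
        }
    }
}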

Usage

From source file:KoImgProc.java

License:Open Source License

public static double[] getAverageBGRColor(int x, int y, int radius, Mat color) {
    int sum = 0;
    int offset = radius / 5;
    double averageBGRColourValue[] = new double[3];
    for (int i = 0; i < 3; i++) {
        sum += color.get(y, x)[i];
        sum += color.get(y, x + offset)[i];
        sum += color.get(y, x - offset)[i];
        sum += color.get(y + offset, x)[i];
        sum += color.get(y - offset, x)[i];
        sum += color.get(y + offset, x + offset)[i];
        sum += color.get(y - offset, x - offset)[i];
        sum += color.get(y + offset, x - offset)[i];
        sum += color.get(y - offset, x + offset)[i];
        averageBGRColourValue[i] = sum / 9.0; // floating-point division avoids truncating the average
        sum = 0;
    }
    return averageBGRColourValue;
}
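
A hypothetical call site for this helper, assuming OpenCV's native library is already loaded, the image path is a placeholder, and the sample point sits far enough from the image border that the offset lookups stay in range:

// hypothetical usage; the file name and radius are placeholders
Mat frame = Imgcodecs.imread("input.jpg");
double[] averageBGR = KoImgProc.getAverageBGRColor(frame.cols() / 2, frame.rows() / 2, 50, frame);
System.out.println("Average B/G/R: " + averageBGR[0] + ", " + averageBGR[1] + ", " + averageBGR[2]);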

From source file:ac.robinson.ticqr.TickBoxImageParserTask.java

License:Apache License

@Override
protected ArrayList<PointF> doInBackground(Void... unused) {
    Log.d(TAG, "Searching for tick boxes of " + mBoxSize + " size");

    // we look for *un-ticked* boxes, rather than ticked, as they are uniform in appearance (and hence easier to
    // detect) - they show up as a box within a box
    ArrayList<PointF> centrePoints = new ArrayList<>();
    int minimumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize, 2));
    int maximumOuterBoxArea = (int) Math.round(Math.pow(mBoxSize * 1.35f, 2));
    int minimumInnerBoxArea = (int) Math.round(Math.pow(mBoxSize * 0.5f, 2));

    // image adjustment - blurSize, blurSTDev and adaptiveThresholdSize must not be even numbers
    int blurSize = 9;
    int blurSTDev = 3;
    int adaptiveThresholdSize = Math.round(mBoxSize * 3); // (oddness ensured below)
    int adaptiveThresholdC = 4; // value to add to the mean (can be negative or zero)
    adaptiveThresholdSize = adaptiveThresholdSize % 2 == 0 ? adaptiveThresholdSize + 1 : adaptiveThresholdSize;

    // how similar the recognised polygon must be to its actual contour - lower is more similar
    float outerPolygonSimilarity = 0.045f;
    float innerPolygonSimilarity = 0.075f; // don't require as much accuracy for the inner part of the tick box

    // how large the maximum internal angle can be (e.g., for checking square shape)
    float maxOuterAngleCos = 0.3f;
    float maxInnerAngleCos = 0.4f;

    // use OpenCV to recognise boxes that have a box inside them - i.e. an un-ticked tick box
    // see: http://stackoverflow.com/a/11427501
    // Bitmap newBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true); // not needed
    Mat bitMat = new Mat();
    Utils.bitmapToMat(mBitmap, bitMat);

    // blur and convert to grey
    // alternative (less flexible): Imgproc.medianBlur(bitMat, bitMat, blurSize);
    Imgproc.GaussianBlur(bitMat, bitMat, new Size(blurSize, blurSize), blurSTDev, blurSTDev);
    Imgproc.cvtColor(bitMat, bitMat, Imgproc.COLOR_RGB2GRAY); // need 8uC1 (1 channel, unsigned char) image type

    // perform adaptive thresholding to detect edges
    // alternative (slower): Imgproc.Canny(bitMat, bitMat, 10, 20, 3, false);
    Imgproc.adaptiveThreshold(bitMat, bitMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY,
            adaptiveThresholdSize, adaptiveThresholdC);

    // get the contours in the image, and their hierarchy
    Mat hierarchyMat = new Mat();
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(bitMat, contours, hierarchyMat, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
    if (DEBUG) {
        Imgproc.drawContours(bitMat, contours, -1, new Scalar(30, 255, 255), 1);
    }

    // parse the contours and look for a box containing another box, with similar enough sizes
    int numContours = contours.size();
    ArrayList<Integer> searchedContours = new ArrayList<>();
    Log.d(TAG, "Found " + numContours + " possible tick box areas");
    if (numContours > 0 && !hierarchyMat.empty()) {
        for (int i = 0; i < numContours; i++) {

            // the original detected contour
            MatOfPoint boxPoints = contours.get(i);

            // hierarchy key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
            int childBox = (int) hierarchyMat.get(0, i)[2]; // usually the largest child (as we're doing RETR_TREE)
            if (childBox == -1) { // we only want elements that have children
                continue;
            } else {
                if (searchedContours.contains(childBox)) {
                    if (DEBUG) {
                        Log.d(TAG, "Ignoring duplicate box at first stage: " + childBox);
                    }
                    continue;
                } else {
                    searchedContours.add(childBox);
                }
            }

            // discard smaller (i.e. noise) outer box areas as soon as possible for speed
            // used to do Imgproc.isContourConvex(outerPoints) later, but the angle check covers this, so no need
            double originalArea = Math.abs(Imgproc.contourArea(boxPoints));
            if (originalArea < minimumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too small");
                // }
                continue;
            }
            if (originalArea > maximumOuterBoxArea) {
                // if (DEBUG) {
                // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                // Log.d(TAG, "Outer box too big");
                // }
                continue;
            }

            // simplify the contours of the outer box - we want to detect four-sided shapes only
            MatOfPoint2f boxPoints2f = new MatOfPoint2f(boxPoints.toArray()); // Point2f for approxPolyDP
            Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                    outerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true); // simplify the contour
            if (boxPoints2f.height() != 4) { // height is number of points
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer box not 4 points");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, angle-wise
            org.opencv.core.Point[] boxPointsArray = boxPoints2f.toArray();
            double maxCosine = 0;
            for (int j = 0; j < 4; j++) {
                org.opencv.core.Point pL = boxPointsArray[j];
                org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
            }
            if (maxCosine > maxOuterAngleCos) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer angles not square enough");
                }
                continue;
            }

            // check that the simplified outer box is approximately a square, line length-wise
            double minLine = Double.MAX_VALUE;
            double maxLine = 0;
            for (int p = 1; p < 4; p++) {
                org.opencv.core.Point p1 = boxPointsArray[p - 1];
                org.opencv.core.Point p2 = boxPointsArray[p];
                double xd = p1.x - p2.x;
                double yd = p1.y - p2.y;
                double lineLength = Math.sqrt((xd * xd) + (yd * yd));
                minLine = Math.min(minLine, lineLength);
                maxLine = Math.max(maxLine, lineLength);
            }
            if (maxLine - minLine > minLine) {
                if (DEBUG) {
                    // drawPoints(bitMat, new MatOfPoint(boxPoints2f.toArray()), new Scalar(255, 255, 255), 1);
                    Log.d(TAG, "Outer lines not square enough");
                }
                continue;
            }

            // draw the outer box if debugging
            if (DEBUG) {
                MatOfPoint debugBoxPoints = new MatOfPoint(boxPointsArray);
                Log.d(TAG,
                        "Potential tick box: " + boxPoints2f.size() + ", " + "area: "
                                + Math.abs(Imgproc.contourArea(debugBoxPoints)) + " (min:" + minimumOuterBoxArea
                                + ", max:" + maximumOuterBoxArea + ")");
                drawPoints(bitMat, debugBoxPoints, new Scalar(50, 255, 255), 2);
            }

            // loop through the children - they should be in descending size order, but sometimes this is wrong
            boolean wrongBox = false;
            while (true) {
                if (DEBUG) {
                    Log.d(TAG, "Looping with box: " + childBox);
                }

                // we've previously tried a child - try the next one
                // key: 0 = next sibling num, 1 = previous sibling num, 2 = first child num, 3 = parent num
                if (wrongBox) {
                    childBox = (int) hierarchyMat.get(0, childBox)[0];
                    if (childBox == -1) {
                        break;
                    }
                    if (searchedContours.contains(childBox)) {
                        if (DEBUG) {
                            Log.d(TAG, "Ignoring duplicate box at loop stage: " + childBox);
                        }
                        break;
                    } else {
                        searchedContours.add(childBox);
                    }
                    //noinspection UnusedAssignment
                    wrongBox = false;
                }

                // perhaps this is the outer box - check its child has no children itself
                // (removed so tiny children (i.e. noise) don't mean we mis-detect an un-ticked box as ticked)
                // if (hierarchyMat.get(0, childBox)[2] != -1) {
                // continue;
                // }

                // check the size of the child box is large enough
                boxPoints = contours.get(childBox);
                originalArea = Math.abs(Imgproc.contourArea(boxPoints));
                if (originalArea < minimumInnerBoxArea) {
                    if (DEBUG) {
                        // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                        Log.d(TAG, "Inner box too small");
                    }
                    wrongBox = true;
                    continue;
                }

                // simplify the contours of the inner box - again, we want four-sided shapes only
                boxPoints2f = new MatOfPoint2f(boxPoints.toArray());
                Imgproc.approxPolyDP(boxPoints2f, boxPoints2f,
                        innerPolygonSimilarity * Imgproc.arcLength(boxPoints2f, true), true);
                if (boxPoints2f.height() != 4) { // height is number of points
                    // if (DEBUG) {
                    // drawPoints(bitMat, boxPoints, new Scalar(255, 255, 255), 1);
                    // }
                    Log.d(TAG, "Inner box fewer than 4 points"); // TODO: allow > 4 for low quality images?
                    wrongBox = true;
                    continue;
                }

                // check that the simplified inner box is approximately a square, angle-wise
                // higher tolerance because noise means if we get several inners, the box may not be quite square
                boxPointsArray = boxPoints2f.toArray();
                maxCosine = 0;
                for (int j = 0; j < 4; j++) {
                    org.opencv.core.Point pL = boxPointsArray[j];
                    org.opencv.core.Point pIntersect = boxPointsArray[(j + 1) % 4];
                    org.opencv.core.Point pR = boxPointsArray[(j + 2) % 4];
                    maxCosine = Math.max(maxCosine, getLineAngle(pL, pIntersect, pR));
                }
                if (maxCosine > maxInnerAngleCos) {
                    Log.d(TAG, "Inner angles not square enough");
                    wrongBox = true;
                    continue;
                }

                // this is probably an inner box - log if debugging
                if (DEBUG) {
                    Log.d(TAG,
                            "Un-ticked inner box: " + boxPoints2f.size() + ", " + "area: "
                                    + Math.abs(Imgproc.contourArea(new MatOfPoint2f(boxPointsArray)))
                                    + " (min: " + minimumInnerBoxArea + ")");
                }

                // find the inner box centre
                double centreX = (boxPointsArray[0].x + boxPointsArray[1].x + boxPointsArray[2].x
                        + boxPointsArray[3].x) / 4f;
                double centreY = (boxPointsArray[0].y + boxPointsArray[1].y + boxPointsArray[2].y
                        + boxPointsArray[3].y) / 4f;

                // draw the inner box if debugging
                if (DEBUG) {
                    drawPoints(bitMat, new MatOfPoint(boxPointsArray), new Scalar(255, 255, 255), 1);
                    Core.circle(bitMat, new org.opencv.core.Point(centreX, centreY), 3,
                            new Scalar(255, 255, 255));
                }

                // add to the list of boxes to check
                centrePoints.add(new PointF((float) centreX, (float) centreY));
                break;
            }
        }
    }

    Log.d(TAG, "Found " + centrePoints.size() + " un-ticked boxes");
    return centrePoints;
}

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void start(final ConnectedNode connectedNode) {
    // load OpenCV shared library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // read configuration variables from the ROS Runtime (configured in the
    // launch file)
    log = connectedNode.getLog();

    // Read Marker Config
    markerConfig = MarkerConfig.createFromConfig(parameter.markerConfigFile(), parameter.patternDirectory());

    camp = getCameraInfo(connectedNode, parameter);

    // start to listen to transform messages in /tf in order to feed the
    // Transformer and lookup transforms
    final TransformationService transformationService = TransformationService.create(connectedNode);

    // Subscribe to Image
    Subscriber<sensor_msgs.Image> subscriberToImage = connectedNode.newSubscriber(parameter.cameraImageTopic(),
            sensor_msgs.Image._TYPE);

    ComputePose computePose = null;
    try {
        final Mat cameraMatrix = CameraParams.getCameraMatrix(camp);
        final MatOfDouble distCoeffs = CameraParams.getDistCoeffs(camp);
        computePose = ComputePose.create(markerConfig, new Size(camp.width(), camp.height()), cameraMatrix,
                distCoeffs, this.parameter.visualization());
    } catch (NyARException e) {
        logger.info("Cannot initialize ComputePose", e);
    } catch (FileNotFoundException e) {
        logger.info("Cannot find file when initialize ComputePose", e);
    }
    final ComputePose poseProcessor = computePose;
    final Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);
    final Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom = connectedNode.newPublisher("tf",
            tf2_msgs.TFMessage._TYPE);

    logger.info("My instance id is " + parameter.instanceId());
    if (heartbeatMonitor != null) {
        logger.info("Start waiting for arlocros id: " + (parameter.instanceId() - 1));
        while (true) {
            final Time currentTime = connectedNode.getCurrentTime();
            final Time lastHeartbeatTime = heartbeatMonitor.getLastTimeReceivedMessage();
            if (lastHeartbeatTime != null) {
                final Duration duration = currentTime.subtract(lastHeartbeatTime);
                if (duration.totalNsecs() > 3.0E8) {
                    logger.info("Not received any heartbeat for 300ms. Start running.");
                    break;
                }
            }
        }
    }

    subscriberToImage.addMessageListener(new MessageListener<sensor_msgs.Image>() {

        @Override
        public void onNewMessage(sensor_msgs.Image message) {
            //
            if (!message.getEncoding().toLowerCase().equals("rgb8")) {
                log.error("Sorry, " + message.getEncoding() + " Image encoding is not supported! EXITING");
                System.exit(-1);
            }
            if (camp != null) {
                try {
                    //
                    final Mat image = Utils.matFromImage(message);
                    // uncomment to add more contrast to the image
                    final Mat thresholdedImage = Utils.tresholdContrastBlackWhite(image,
                            parameter.filterBlockSize(), parameter.subtractedConstant(),
                            parameter.invertBlackWhiteColor());
                    image.release();
                    // Mat cannyimg = new Mat(image.height(), image.width(),
                    // CvType.CV_8UC3);
                    // Imgproc.Canny(image, cannyimg, 10, 100);
                    // Imshow.show(cannyimg);

                    // image.convertTo(image, -1, 1.5, 0);
                    // setup camera matrix and return vectors
                    // compute pose
                    final Mat rvec = new Mat(3, 1, CvType.CV_64F);
                    final MatOfDouble tvec = new MatOfDouble(1.0, 1.0, 1.0);
                    final boolean hasPose = poseProcessor.computePose(rvec, tvec, thresholdedImage);

                    if (!hasPose) {
                        return;
                    }

                    thresholdedImage.release();

                    // publish pose
                    final QuaternionHelper q = new QuaternionHelper();

                    // convert rotation vector result of solvepnp to rotation matrix
                    Mat R = new Mat(3, 3, CvType.CV_32FC1);
                    Calib3d.Rodrigues(rvec, R);
                    // see publishers before for documentation
                    final Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
                    R = R.t();
                    final double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
                    final double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
                    final double attitudeZ = Math.asin(R.get(1, 0)[0]);
                    q.setFromEuler(bankX, headingY, attitudeZ);
                    Core.multiply(R, new Scalar(-1), R);
                    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0);
                    R.release();
                    final org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(
                            q.getX(), q.getY(), q.getZ(), q.getW());
                    final double x = tvec_map_cam.get(0, 0)[0];
                    final double y = tvec_map_cam.get(1, 0)[0];
                    final double z = tvec_map_cam.get(2, 0)[0];
                    tvec_map_cam.release();

                    final org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x,
                            y, z);
                    final org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(
                            translation, rotation);

                    // odom to camera_rgb_optical_frame
                    final GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
                    final GraphName targetFrame = GraphName.of("base_link");
                    org.ros.rosjava_geometry.Transform transform_cam_base = null;

                    if (transformationService.canTransform(targetFrame, sourceFrame)) {
                        try {
                            transform_cam_base = transformationService.lookupTransform(targetFrame,
                                    sourceFrame);
                        } catch (Exception e) {
                            log.error(ExceptionUtils.getStackTrace(e));
                            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                    + "base_link! " + "However, will continue..");
                            // cancel this loop..no result can be computed
                            return;
                        }
                    } else {
                        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to "
                                + "base_link!" + " However, " + "will continue..");
                        // cancel this loop..no result can be computed
                        return;
                    }

                    // multiply results
                    org.ros.rosjava_geometry.Transform current_pose = org.ros.rosjava_geometry.Transform
                            .identity();
                    current_pose = current_pose.multiply(transform_map_cam);
                    current_pose = current_pose.multiply(transform_cam_base);

                    if (current_pose.getTranslation().getZ() < 0.5) {
                        return;
                    }

                    // check the plausibility of the pose: reject it if the implied movement
                    // exceeds the robot's defined maximum speed
                    if (parameter.badPoseReject()) {
                        Time current_timestamp = connectedNode.getCurrentTime();
                        // TODO Unfortunately, we do not have the tf timestamp at
                        // hand here. So we can only use the current timestamp.
                        double maxspeed = 5;
                        boolean goodpose = false;
                        // if (current_pose != null && current_timestamp != null) {
                        if ((last_pose != null && last_timestamp != null)
                                && !Double.isNaN(last_pose.getTranslation().getX())) {
                            // check speed of movement between last and current pose
                            double distance = PoseCompare.distance(current_pose, last_pose);
                            double timedelta = PoseCompare.timedelta(current_timestamp, last_timestamp);
                            if ((distance / timedelta) < maxspeed) {
                                if (smoothing) {
                                    double xold = last_pose.getTranslation().getX();
                                    double yold = last_pose.getTranslation().getY();
                                    double zold = last_pose.getTranslation().getZ();
                                    double xnew = current_pose.getTranslation().getX();
                                    double ynew = current_pose.getTranslation().getY();
                                    double znew = current_pose.getTranslation().getZ();
                                    final org.ros.rosjava_geometry.Vector3 smoothTranslation = new org.ros.rosjava_geometry.Vector3(
                                            (xold * 2 + xnew) / 3, (yold * 2 + ynew) / 3,
                                            (zold * 2 + znew) / 3);
                                    current_pose = new org.ros.rosjava_geometry.Transform(smoothTranslation,
                                            current_pose.getRotationAndScale());
                                    last_pose = current_pose;
                                }
                                last_pose = current_pose;
                                last_timestamp = current_timestamp;
                                goodpose = true;
                            } else {
                                log.info("distance " + distance + " time: " + timedelta + " --> Pose rejected");
                                log.info("current pose: " + current_pose.getTranslation().getX() + " "
                                        + current_pose.getTranslation().getY() + " "
                                        + current_pose.getTranslation().getZ());
                                log.info("last pose: " + last_pose.getTranslation().getX() + " "
                                        + last_pose.getTranslation().getY() + " "
                                        + last_pose.getTranslation().getZ());
                            }

                        } else {
                            last_pose = current_pose;
                            last_timestamp = current_timestamp;
                        }
                        // }
                        // bad pose rejection
                        if (!goodpose) {
                            return;
                        }
                    }

                    // set information to message
                    final geometry_msgs.PoseStamped posestamped = posePublisher.newMessage();
                    Pose pose = posestamped.getPose();
                    Quaternion orientation = pose.getOrientation();
                    Point point = pose.getPosition();

                    point.setX(current_pose.getTranslation().getX());

                    point.setY(current_pose.getTranslation().getY());

                    point.setZ(current_pose.getTranslation().getZ());

                    orientation.setW(current_pose.getRotationAndScale().getW());
                    orientation.setX(current_pose.getRotationAndScale().getX());
                    orientation.setY(current_pose.getRotationAndScale().getY());
                    orientation.setZ(current_pose.getRotationAndScale().getZ());

                    // frame_id too
                    posestamped.getHeader().setFrameId("map");
                    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
                    posePublisher.publish(posestamped);
                    mostRecentPose.set(posestamped);

                    //                publishCamFrameToMarkerFrame(rvec, tvec, tfPublisherCamToMarker, connectedNode);
                    //                publishMapToOdom(
                    //                    rvec, tvec, transformationService, tfPublisherMapToOdom, connectedNode);
                    rvec.release();
                    tvec.release();

                } catch (Exception e) {
                    logger.info("An exception occurs.", e);
                }
            }
        }
    });
}

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void publishCamFrameToMarkerFrame(Mat rvec, Mat tvec,
        Publisher<tf2_msgs.TFMessage> tfPublisherCamToMarker, ConnectedNode connectedNode) {
    QuaternionHelper q = new QuaternionHelper();

    /*
     * http://euclideanspace.com/maths/geometry/rotations/
     * conversions/matrixToEuler/index.htm
     * http://stackoverflow.com/questions/12933284/rodrigues-into-
     * eulerangles-and-vice-versa
     *
     * heading = atan2(-m20,m00) attitude = asin(m10) bank =
     * atan2(-m12,m11)
     */
    // convert output rotation vector rvec to rotation matrix R
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // get rotations around X,Y,Z from rotation matrix R
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    R.release();
    // convert Euler angles to quaternion
    q.setFromEuler(bankX, headingY, attitudeZ);

    // set information to message
    TFMessage tfmessage = tfPublisherCamToMarker.newMessage();
    TransformStamped posestamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = posestamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 point = transform.getTranslation();
    point.setX(tvec.get(0, 0)[0]);
    point.setY(tvec.get(1, 0)[0]);
    point.setZ(tvec.get(2, 0)[0]);

    orientation.setW(q.getW());
    orientation.setX(q.getX());
    orientation.setY(q.getY());
    orientation.setZ(q.getZ());
    posestamped.getHeader().setFrameId(parameter.cameraFrameName());
    posestamped.setChildFrameId(parameter.markerFrameName());
    posestamped.getHeader().setStamp(connectedNode.getCurrentTime());
    // frame_id too
    tfmessage.getTransforms().add(posestamped);
    tfPublisherCamToMarker.publish(tfmessage);
}

From source file:arlocros.ArMarkerPoseEstimator.java

License:Apache License

private void publishMapToOdom(Mat rvec, Mat tvec, TransformationService transformationService,
        Publisher<tf2_msgs.TFMessage> tfPublisherMapToOdom, ConnectedNode connectedNode) {
    // compute transform map to odom from map to
    // camera_rgb_optical_frame and odom to camera_rgb_optical_frame

    // map to camera_rgb_optical_frame
    Mat tvec_map_cam = new MatOfDouble(1.0, 1.0, 1.0);
    QuaternionHelper q = new QuaternionHelper();
    // get rotation matrix R from solvepnp output rotation vector
    // rvec
    Mat R = new Mat(3, 3, CvType.CV_32FC1);
    Calib3d.Rodrigues(rvec, R);
    // transpose R, because we need the transformation from
    // world(map) to camera
    R = R.t();
    // get rotations around X,Y,Z from R in radians
    double bankX = Math.atan2(-R.get(1, 2)[0], R.get(1, 1)[0]);
    double headingY = Math.atan2(-R.get(2, 0)[0], R.get(0, 0)[0]);
    double attitudeZ = Math.asin(R.get(1, 0)[0]);
    q.setFromEuler(bankX, headingY, attitudeZ);
    // compute translation vector from world (map) to cam
    // tvec_map_cam
    Core.multiply(R, new Scalar(-1), R); // R=-R
    Core.gemm(R, tvec, 1, new Mat(), 0, tvec_map_cam, 0); // tvec_map_cam=R*tvec
    R.release();

    org.ros.rosjava_geometry.Quaternion rotation = new org.ros.rosjava_geometry.Quaternion(q.getX(), q.getY(),
            q.getZ(), q.getW());
    double x = tvec_map_cam.get(0, 0)[0];
    double y = tvec_map_cam.get(1, 0)[0];
    double z = tvec_map_cam.get(2, 0)[0];
    tvec_map_cam.release();
    // create a Transform object that holds the transform from map to cam
    org.ros.rosjava_geometry.Vector3 translation = new org.ros.rosjava_geometry.Vector3(x, y, z);
    org.ros.rosjava_geometry.Transform transform_map_cam = new org.ros.rosjava_geometry.Transform(translation,
            rotation);

    // odom to camera_rgb_optical_frame
    GraphName sourceFrame = GraphName.of(parameter.cameraFrameName());
    GraphName targetFrame = GraphName.of("odom");
    org.ros.rosjava_geometry.Transform transform_cam_odom = null;
    if (transformationService.canTransform(targetFrame, sourceFrame)) {
        try {
            transform_cam_odom = transformationService.lookupTransform(targetFrame, sourceFrame);
        } catch (Exception e) {
            log.error(ExceptionUtils.getStackTrace(e));
            log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! " + ""
                    + "" + "" + "" + "However, " + "will continue..");
            return;
        }
    } else {
        log.info("Cloud not get transformation from " + parameter.cameraFrameName() + " to " + "odom! "
                + "However, will " + "continue..");
        // cancel this loop..no result can be computed
        return;
    }
    // multiply results
    org.ros.rosjava_geometry.Transform result = org.ros.rosjava_geometry.Transform.identity();
    result = result.multiply(transform_map_cam);
    result = result.multiply(transform_cam_odom);

    // set information to ROS message
    TFMessage tfMessage = tfPublisherMapToOdom.newMessage();
    TransformStamped transformStamped = connectedNode.getTopicMessageFactory()
            .newFromType(geometry_msgs.TransformStamped._TYPE);
    Transform transform = transformStamped.getTransform();

    Quaternion orientation = transform.getRotation();
    Vector3 vector = transform.getTranslation();
    vector.setX(result.getTranslation().getX());
    vector.setY(result.getTranslation().getY());
    vector.setZ(result.getTranslation().getZ());

    orientation.setW(result.getRotationAndScale().getW());
    orientation.setX(result.getRotationAndScale().getX());
    orientation.setY(result.getRotationAndScale().getY());
    orientation.setZ(result.getRotationAndScale().getZ());
    transformStamped.getHeader().setFrameId("map");
    transformStamped.setChildFrameId("odom");
    transformStamped.getHeader().setStamp(connectedNode.getCurrentTime());
    // frame_id too
    tfMessage.getTransforms().add(transformStamped);
    tfPublisherMapToOdom.publish(tfMessage);
}

From source file:balldetection.BallDetection.java

/**
 * @param args the command line arguments
 * @throws java.io.IOException
 */
public static void main(String[] args) throws IOException {

    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html 
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
            | javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(CameraWindow.class.getName()).log(java.util.logging.Level.SEVERE,
                null, ex);
    }
    //</editor-fold>

    CameraWindow cWindow = new CameraWindow();
    cWindow.setVisible(true);

    int radius = 0;
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // initialization of matrices
    Mat circles = new Mat();
    gray = new Mat();
    hsv = new Mat();
    filter = new Mat();
    dst = new Mat();

    camera = new VideoCapture(0);
    Mat frame = new Mat();
    Webcam.ImagePanel panel = Webcam.createPanel(camera, "src");
    Webcam.ImagePanel panel2 = Webcam.createPanel(camera, "filter");
    Webcam.ImagePanel panel3 = Webcam.createPanel(camera, "dst");

    while (true) {

        camera.read(frame);
        src = frame;

        GaussianBlur(src, src, new Size(3, 3), 2, 2);
        Imgproc.cvtColor(src, hsv, Imgproc.COLOR_BGR2HSV);
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);

        Core.inRange(gray, new Scalar(20, 100, 100), new Scalar(30, 255, 255), gray);
        Core.inRange(hsv, new Scalar(cWindow.get_hLower(), cWindow.get_sLower(), cWindow.get_vLower()),
                new Scalar(cWindow.get_hUpper(), cWindow.get_sUpper(), cWindow.get_vUpper()), filter);

        Core.inRange(src, new Scalar(cWindow.get_hLower(), cWindow.get_sLower(), cWindow.get_vLower()),
                new Scalar(cWindow.get_hUpper(), cWindow.get_sUpper(), cWindow.get_vUpper()), dst);

        double[] temp = hsv.get(hsv.rows() / 2, hsv.cols() / 2);
        System.out.println(temp[0] + ", " + temp[1] + ", " + temp[2] + ", " + radius);
        //System.out.println("Current Distance from ball: " + ((2.5366*radius) - 123.02));

        Imgproc.HoughCircles(filter, circles, CV_HOUGH_GRADIENT, cWindow.get_dp(), filter.rows() / 2,
                cWindow.get_param1(), cWindow.get_param2(), cWindow.get_minCircleSize(),
                cWindow.get_maxCircleSize());

        for (int i = 0; i < circles.cols(); i++) {
            Point center = new Point(Math.round(circles.get(0, i)[0]), Math.round(circles.get(0, i)[1]));
            radius = (int) Math.round(circles.get(0, i)[2]);
            // draw the circle center
            Core.circle(src, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
            // draw the circle outline
            Core.circle(src, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
            //System.out.println("" + circles.get(0,0)[0] + ", " + circles.get(0,0)[1] + ", " + circles.get(0,0)[2]);
        }

        panel.updateImage(toBufferedImage(src));
        panel2.updateImage(toBufferedImage(filter));
        panel3.updateImage(toBufferedImage(dst));
    }
}

From source file:br.com.prj.TelaPrincipal.java

public void adicionarLabel(final Image img, final Rect faceEncontrada) {

    JLabel lbImagem = new JLabel() {
        public void paintComponent(Graphics g) {
            super.paintComponent(g);
            g.drawImage(img, 0, 0, getWidth() - 5, getHeight() - 5, null);
        }
    };

    final Integer ultimoPixelColuna = faceEncontrada.width + faceEncontrada.x;
    final Integer ultimoPixelLinha = faceEncontrada.height + faceEncontrada.y;

    final Mat mat = convertImageToMat(img);
    Imgproc.medianBlur(mat, mat, 17);

    lbImagem.addMouseListener(new MouseAdapter() {

        private Boolean blur = false;

        @Override
        public void mouseClicked(MouseEvent e) {

            int i = -1, j = 0;
            for (int linha = faceEncontrada.y; linha <= ultimoPixelLinha; linha++) {
                j = 0;
                i++;
                for (int coluna = faceEncontrada.x; coluna < ultimoPixelColuna; coluna++) {
                    double[] cor = mat.get(i, j++);
                    if (cor != null) {
                        imagemCarregada.put(linha, coluna, new double[] { cor[0], cor[1], cor[2] });
                    }
                }
            }

            if (!blur) {
                Imgcodecs.imwrite("img/imagem_processada.jpg", imagemCarregada);
            } else {
                System.out.println("remover blur");
            }

            blur = !blur;
            imgCarregada.setIcon(resizeImage("img/imagem_processada.jpg", imgCarregada.getWidth(),
                    imgCarregada.getHeight()));
        }

    });

    // Position the image
    lbImagem.setBounds(boundX, boundY, 70, 70);
    lbImagem.setVisible(rootPaneCheckingEnabled);

    // Prepare the x position of the next label
    boundX += 70;

    // Check whether the width limit has been exceeded
    if (boundX >= jPanel1.getWidth() - 70) {
        // Move down one row
        boundY += 70;
        boundX = 10;
    }

    jPanel1.add(lbImagem);
    jPanel1.repaint();

}

From source file:by.zuyeu.deyestracker.core.util.CVCoreUtils.java

public static void insertSubmatByRect(Mat subImage, Rect rect, Mat origImage) {
    double colScale = 1.0 * origImage.cols() / origImage.width();
    int colStart = (int) (1.0 * rect.x * colScale);
    double rowScale = 1.0 * origImage.rows() / origImage.height();
    int rowStart = (int) (1.0 * rect.y * rowScale);
    for (int x1 = 0, x2 = colStart; x1 < subImage.cols(); x1++, x2++) {
        for (int y1 = 0, y2 = rowStart; y1 < subImage.rows(); y1++, y2++) {
            final double[] subImgData = subImage.get(y1, x1);
            origImage.put(y2, x2, subImgData);
        }
    }
}

From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java

License:Open Source License

public static Rect[] rotateFaces(Mat img, Rect[] faces, int angle) {
    Point center = new Point(img.cols() / 2, img.rows() / 2);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1);
    rotMat.convertTo(rotMat, CvType.CV_32FC1);
    float scale = (float) img.cols() / img.rows(); // cast to avoid integer division
    for (Rect face : faces) {
        Mat m = new Mat(3, 1, CvType.CV_32FC1);
        m.put(0, 0, face.x);
        m.put(1, 0, face.y);
        m.put(2, 0, 1);
        Mat res = Mat.zeros(2, 1, CvType.CV_32FC1);
        Core.gemm(rotMat, m, 1, new Mat(), 0, res, 0);
        face.x = (int) res.get(0, 0)[0];
        face.y = (int) res.get(1, 0)[0];
        if (angle == 270 || angle == -90) {
            face.x = (int) (face.x * scale - face.width);
            face.x = face.x + face.width / 4;
            face.y = face.y + face.height / 4;
        } else if (angle == 180 || angle == -180) {
            face.x = face.x - face.width;
            face.y = face.y - face.height;
        } else if (angle == 90 || angle == -270) {
            face.y = (int) (face.y * scale - face.height);
            face.x = face.x - face.width / 4;
            face.y = face.y - face.height / 4;
        }
    }
    return faces;
}

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.Contours.LocalBinaryPattern.java

License:Open Source License

@Override
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        // Resize for Performance enhancement
        Size size = new Size(preProcessor.getN(), preProcessor.getN());
        Imgproc.resize(img, img, size);
        Mat lbp = new Mat(img.rows() - 2, img.cols() - 2, img.type());
        for (int i = 1; i < img.rows() - 1; i++) {
            for (int j = 1; j < img.cols() - 1; j++) {
                BitSet out = new BitSet(8);
                double cen = img.get(i, j)[0];
                if (img.get(i - 1, j - 1)[0] > cen)
                    out.set(0);
                if (img.get(i - 1, j)[0] > cen)
                    out.set(1);
                if (img.get(i - 1, j + 1)[0] > cen)
                    out.set(2);
                if (img.get(i, j + 1)[0] > cen)
                    out.set(3);
                if (img.get(i + 1, j + 1)[0] > cen)
                    out.set(4);
                if (img.get(i + 1, j)[0] > cen)
                    out.set(5);
                if (img.get(i + 1, j - 1)[0] > cen)
                    out.set(6);
                if (img.get(i, j - 1)[0] > cen)
                    out.set(7);
                int value = 0;
                for (int k = 0; k < out.length(); k++) {
                    int index = out.nextSetBit(k);
                    value += Math.pow(2, out.length() - 1 - index);
                    k = index;
                }
                lbp.put(i - 1, j - 1, value);
            }
        }
        processed.add(lbp);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}