Example usage for org.opencv.core Mat cols


Introduction

This page collects example usages of the org.opencv.core.Mat.cols() method from open-source projects.

Prototype

public int cols() 
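For orientation, here is a minimal, self-contained sketch of cols() (a hedged example, assuming the OpenCV Java bindings are on the classpath and the native library is available to load):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatColsDemo {
    public static void main(String[] args) {
        // The native OpenCV library must be loaded before any Mat is created.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Mat dimensions are given as (rows, cols): 480 rows, 640 columns.
        Mat img = new Mat(480, 640, CvType.CV_8UC3);

        // cols() is the image width in pixels, rows() the height.
        System.out.println("cols = " + img.cols()); // 640
        System.out.println("rows = " + img.rows()); // 480

        // A pattern used throughout the examples below: allocate a
        // single-channel mask with the same dimensions as the source.
        Mat mask = new Mat(img.rows(), img.cols(), CvType.CV_8UC1);
    }
}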

Usage

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

public double getCenterVortexWidth() {
    Mat matIn = getCameraMat();
    if (matIn != null) {
        Log.d(TAG, "mat not null");
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

        Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
        Scalar vortexLowerThresh = new Scalar(37, 46, 34);
        Scalar vortexUpperThresh = new Scalar(163, 255, 255);

        Core.inRange(matIn, vortexLowerThresh, vortexUpperThresh, matMasked);

        //find largest contour (the part of the beacon we are interested in)
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat contourMat = matMasked.clone();
        Imgproc.findContours(contourMat, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);

        if (contours.size() > 1) {
            int largestContourIndex = 0;
            double lastContourArea = 0;
            for (int i = 0; i < contours.size(); i++) {
                double contourArea = Imgproc.contourArea(contours.get(i));
                if (contourArea > lastContourArea) {
                    largestContourIndex = i;
                    lastContourArea = contourArea;
                }
            }
            //get bounding rect
            Rect boundingRect = Imgproc
                    .boundingRect(new MatOfPoint(contours.get(largestContourIndex).toArray()));
            Core.rectangle(matIn, new Point(boundingRect.x, boundingRect.y),
                    new Point(boundingRect.x + boundingRect.width, boundingRect.y + boundingRect.height),
                    OPEN_CV_GREEN);

            saveMatToDisk(matIn);//debug only

            return boundingRect.width;
        }
    }
    return -1;
}
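The find-the-largest-contour loop above reappears in several examples on this page. As a sketch (using only the OpenCV Java calls and types already imported by the surrounding example), the pattern can be factored into a helper that also handles the empty case explicitly:

// Sketch of the recurring largest-contour pattern; returns -1 when the
// list is empty so callers can bail out instead of indexing blindly.
private static int largestContourIndex(List<MatOfPoint> contours) {
    int largestIndex = -1;
    double largestArea = 0;
    for (int i = 0; i < contours.size(); i++) {
        double area = Imgproc.contourArea(contours.get(i));
        if (area > largestArea) {
            largestIndex = i;
            largestArea = area;
        }
    }
    return largestIndex;
}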

From source file:org.firstinspires.ftc.teamcode.vision.VisionLib.java

public int getBlueSide() {
    Mat matIn = getCameraMat();
    if (matIn != null) {
        Imgproc.cvtColor(matIn, matIn, Imgproc.COLOR_RGB2HSV);

        Mat matMasked = new Mat(matIn.rows(), matIn.cols(), CvType.CV_8UC1);
        Core.inRange(matIn, BLUE_LOWER_THRESH, BLUE_UPPER_THRESH, matMasked);

        //find largest contour (the part of the beacon we are interested in)
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat contourMat = matMasked.clone();
        Imgproc.findContours(contourMat, contours, hierarchy, Imgproc.RETR_EXTERNAL,
                Imgproc.CHAIN_APPROX_SIMPLE);

        if (contours.size() > 1) {
            int largestContourIndex = 0;
            double lastContourArea = 0;
            for (int i = 0; i < contours.size(); i++) {
                double contourArea = Imgproc.contourArea(contours.get(i));
                if (contourArea > lastContourArea) {
                    largestContourIndex = i;
                    lastContourArea = contourArea;
                }
            }
            //get bounding rect
            Rect boundingRect = Imgproc
                    .boundingRect(new MatOfPoint(contours.get(largestContourIndex).toArray()));
            Core.rectangle(matIn, new Point(boundingRect.x, boundingRect.y),
                    new Point(boundingRect.x + boundingRect.width, boundingRect.y + boundingRect.height),
                    OPEN_CV_GREEN);

            //                saveMatToDisk(matIn);//debug only
            //find which side it's on
            if (boundingRect.x > matIn.cols() / 2) {//depends on which camera we use
                Log.d(TAG, "left");
                return BLUE_LEFT;
            } else {
                Log.d(TAG, "right");
                return BLUE_RIGHT;
            }
        }
        Log.d(TAG, "countors:" + contours.size());
        return TEST_FAILED;
    }
    return TEST_FAILED;
}
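A note on the midline test above: it compares the bounding box's left edge x against matIn.cols() / 2, so a wide box straddling the center is classified by its edge alone. A center-based variant (a hypothetical sketch, not in the original) behaves more predictably for such boxes:

// Sketch: classify by the box's horizontal center instead of its
// left edge, which is more robust for boxes straddling the midline.
int boxCenterX = boundingRect.x + boundingRect.width / 2;
boolean rightHalf = boxCenterX > matIn.cols() / 2;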

From source file:org.firstinspires.ftc.teamcode.VuforiaColor.java

public void runOpMode() throws InterruptedException {
    //        frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    //        backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    //        frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    //        backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    //        rollerMotor = hardwareMap.dcMotor.get("rollerMotor");
    //        backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    //        backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
    this.vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    piController = new PIController(.0016, 0.00013, 0.00023, 0.000012);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 1);
    VuforiaTrackables visionTargets = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheelsTarget = visionTargets.get(0);
    wheelsTarget.setName("Wheels"); // Wheels

    VuforiaTrackable toolsTarget = visionTargets.get(1);
    toolsTarget.setName("Tools"); // Tools

    VuforiaTrackable legosTarget = visionTargets.get(2);
    legosTarget.setName("Legos"); // Legos

    VuforiaTrackable gearsTarget = visionTargets.get(3);
    gearsTarget.setName("Gears"); // Gears

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(visionTargets);

    /**
     * We use units of mm here because those are the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotLength = 16 * mmPerInch;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;
    float mmPhoneZOffset = 5.5f * mmPerInch;

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gearsTarget.setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    toolsTarget.setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    /*
     * To place the Wheels and Legos Targets on the Blue Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Finally, we translate it along the Y axis towards the blue audience wall.
     */
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheelsTarget.setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legosTarget.setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of FRONT camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, the rotation below is 180deg about the Z axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive end towards the origin. Positive
     * rotations are then CCW, and negative rotations CW. An example: consider looking down the
     * positive Z axis towards the origin. A positive rotation about Z (i.e. a rotation parallel
     * to the X-Y plane) is then CCW, as one would expect from classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, mmPhoneZOffset)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    0, 180, 0));
    RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) visionTargets.get(0).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(1).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(2).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(3).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    visionTargets.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    telemetry.addData("Loop", "Out");
    telemetry.update();
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take();
        telemetry.update();
        if (frame != null) {
            Image rgb = null;
            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                } //if
            } //for

            if (rgb != null) {
                Bitmap bmp = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                bmp.copyPixelsFromBuffer(rgb.getPixels());

                img = new Mat();
                Utils.bitmapToMat(bmp, img);
                telemetry.addData("Img", "Converted");
                telemetry.update();
            }
        }

        for (VuforiaTrackable beacon : allTrackables) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (upperLeft.getData()[0] - lowerLeft.getData()[0]);
                    int width = (int) (upperRight.getData()[1] - upperLeft.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 1
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 1 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);
                    telemetry.addData(img.rows() + "", img.cols());
                    telemetry.update();

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit) {
                // (!isButtonHit) Only hit the button once
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Find the color three times in a row before hitting it
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", piController.processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}
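One caveat in the moment-based centroid code above: get_m00() is zero for degenerate (zero-area) contours, and the int casts of the divisions then produce meaningless coordinates. A guarded sketch of the same computation (a hypothetical helper, not part of the original op mode):

// Sketch: centroid of the last detected blob, skipping contours with
// zero area (m00 == 0), for which the centroid is undefined.
private static Point blobCentroid(List<MatOfPoint> contours) {
    Point center = null;
    for (MatOfPoint contour : contours) {
        Moments m = Imgproc.moments(contour, false);
        if (m.get_m00() > 0) {
            center = new Point(m.get_m10() / m.get_m00(),
                               m.get_m01() / m.get_m00());
        }
    }
    return center; // null when nothing usable was detected
}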

From source file:org.firstinspires.ftc.teamcode.VuforiaMovement.java

public void runOpMode() throws InterruptedException {
    frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    rollerMotor = hardwareMap.dcMotor.get("rollerMotor");

    backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    this.vuforia = new VuforiaLocalizerImplSubclass(parameters);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("Wheels");
    beacons.get(1).setName("Tools");
    beacons.get(2).setName("Legos");
    beacons.get(3).setName("Gears");

    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;

    // Initialize the location of the targets and phone on the field
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(0).setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(1).setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(2).setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(3).setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    RobotLog.ii(TAG, "Phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) beacons.get(0).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(1).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(2).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(3).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    beacons.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    movingToFirstBeacon = false;
    liningUpWithFirstBeacon = false;
    movingToSecondBeacon = false;
    liningUpWithSecondBeacon = false;
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        if (movingToFirstBeacon) {
            // TODO Estimate distance to the beacon from a point TBD
            // TODO Estimate distance to move forward and turn to face the beacon until second movement set
            // Move this outside the loop?

            // Move forward until you see the beacon
            while (movingToFirstBeacon) {
                // Move in increments to minimize the times you check the trackables
                for (int i = 0; i < 50; i++) {
                    frontRightMotor.setPower(1);
                    backRightMotor.setPower(1);
                    frontLeftMotor.setPower(1);
                    backLeftMotor.setPower(1);
                }

                for (VuforiaTrackable beacon : beacons) {
                    // Add beacon to telemetry if visible
                    if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                        visibleTarget = beacon.getName();
                        telemetry.addData(visibleTarget, "Visible");
                    }

                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon
                            .getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                }

                // Move to the beacon until the beacon is in sight
                if (lastLocation != null) {
                    movingToFirstBeacon = false; // Only execute this once
                }
            }
        }

        while (liningUpWithFirstBeacon) {
            for (VuforiaTrackable beacon : beacons) {
                // Add beacon to telemetry if visible
                if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                    visibleTarget = beacon.getName();
                    telemetry.addData(visibleTarget, "Visible");
                }

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                        .getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }

            RobotMovement movement = processLocation(lastLocation, visibleTarget);
            if (movement.isNoMovement()) {
                liningUpWithFirstBeacon = false;
            }

            processMovement(movement);
        }

        if (movingToSecondBeacon) {
            // TODO Estimate the movements/distance from the first beacon to the second
            movingToSecondBeacon = false; // Only execute this once
        }

        if (vuforia.rgb != null && !isButtonHit) {
            Bitmap bmp = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bmp.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            img = new Mat();
            Utils.bitmapToMat(bmp, img);
        }

        for (VuforiaTrackable beacon : beacons) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */
                // DOES NOT WORK???
                degreesToTurn = Math.toDegrees(Math.atan2(translation.get(1), translation.get(2)));
                telemetry.addData("Degrees-", degreesToTurn);
                // TODO Check degree turning threshold
                if (Math.abs(degreesToTurn) > 15) {
                    // Turn after doing calculating transformations
                    needToTurn = true;
                }

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);
                    telemetry.addData(beacon.getName() + "-Degrees", degreesToTurn);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (lowerLeft.getData()[0] - upperLeft.getData()[0]);
                    int width = (int) (upperLeft.getData()[1] - upperRight.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 0
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 0 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit && !needToTurn) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Find the color three times in a row before hitting it
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file:org.firstinspires.ftc.teamcode.VuforiaOp.java

public int getBeaconConfig(Image img, VuforiaTrackableDefaultListener beacon, CameraCalibration camCal) {

    OpenGLMatrix pose = beacon.getRawPose();

    if (pose != null && img != null && img.getPixels() != null) {

        Matrix34F rawPose = new Matrix34F();
        float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);

        rawPose.setData(poseData);

        //calculating pixel coordinates of beacon corners
        float[][] corners = new float[4][2];

        corners[0] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 276, 0)).getData(); //upper left of beacon
        corners[1] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 276, 0)).getData(); //upper right of beacon
        corners[2] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 92, 0)).getData(); //lower right of beacon
        corners[3] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 92, 0)).getData(); //lower left of beacon

        //getting camera image...
        Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
        bm.copyPixelsFromBuffer(img.getPixels());

        //turning the corner pixel coordinates into a proper bounding box
        Mat crop = OCVUtils.bitmapToMat(bm, CvType.CV_8UC3);
        float x = Math.min(Math.min(corners[1][0], corners[3][0]), Math.min(corners[0][0], corners[2][0]));
        float y = Math.min(Math.min(corners[1][1], corners[3][1]), Math.min(corners[0][1], corners[2][1]));
        float width = Math.max(Math.abs(corners[0][0] - corners[2][0]),
                Math.abs(corners[1][0] - corners[3][0]));
        float height = Math.max(Math.abs(corners[0][1] - corners[2][1]),
                Math.abs(corners[1][1] - corners[3][1]));

        //make sure our bounding box doesn't go outside of the image
        //OpenCV doesn't like that...
        x = Math.max(x, 0);
        y = Math.max(y, 0);
        width = (x + width > crop.cols()) ? crop.cols() - x : width;
        height = (y + height > crop.rows()) ? crop.rows() - y : height;

        //cropping bounding box out of camera image
        final Mat cropped = new Mat(crop, new Rect((int) x, (int) y, (int) width, (int) height));

        Bitmap pic = OCVUtils.matToBitmap(cropped);
        //filtering out non-beacon-blue colours in HSV colour space
        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_RGB2HSV_FULL);

        /* Debug only: optionally dump the cropped and raw camera bitmaps to
        disk for offline inspection, e.g.:

        try {
            FileOutputStream fos = new FileOutputStream(
                    new File("/storage/emulated/0/", "cropped.png"));
            pic.compress(Bitmap.CompressFormat.PNG, 100, fos);
            fos.close();
        } catch (IOException e) {
            // best-effort debug output; ignore failures
        }
        */

        //get filtered mask
        //if pixel is within acceptable blue-beacon-colour range, it's changed to white.
        //Otherwise, it's turned to black
        Mat mask = new Mat();

        Core.inRange(cropped, BEACON_BLUE_LOW, BEACON_BLUE_HIGH, mask);
        Moments mmnts = Imgproc.moments(mask, true);

        //calculating centroid of the resulting binary mask via image moments
        Log.i("CentroidX", "" + ((mmnts.get_m10() / mmnts.get_m00())));
        Log.i("CentroidY", "" + ((mmnts.get_m01() / mmnts.get_m00())));

        //checking if blue either takes up the majority of the image (which means the beacon is all blue)
        //or if there's barely any blue in the image (which means the beacon is all red or off)
        //            if (mmnts.get_m00() / mask.total() > 0.8) {
        //                return VortexUtils.BEACON_ALL_BLUE;
        //            } else if (mmnts.get_m00() / mask.total() < 0.1) {
        //                return VortexUtils.BEACON_NO_BLUE;
        //            }//elseif

        //Note: for some reason, we end up with an image that is rotated 90 degrees
        //if centroid is in the bottom half of the image, the blue beacon is on the left
        //if the centroid is in the top half, the blue beacon is on the right
        if ((mmnts.get_m01() / mmnts.get_m00()) < cropped.rows() / 2) {
            return VortexUtils.BEACON_RED_BLUE;
        } else {
            return VortexUtils.BEACON_BLUE_RED;
        } //else
    } //if

    return VortexUtils.NOT_VISIBLE;
}
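The clamping logic above keeps the bounding box inside the image, using cols() and rows() as the exclusive upper bounds. The same idea as a stand-alone sketch (a hypothetical helper):

// Sketch: clamp a candidate crop rectangle to the image bounds, since
// constructing a submat/ROI that reaches outside the Mat throws in OpenCV.
private static Rect clampToImage(Rect box, Mat img) {
    int x = Math.max(box.x, 0);
    int y = Math.max(box.y, 0);
    int width = Math.min(box.width, img.cols() - x);
    int height = Math.min(box.height, img.rows() - y);
    return new Rect(x, y, width, height);
}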

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public void executePreprocessor() {
    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    Mat mat = originalMat.createGrayImage();
    showImageResult(mat, "Original image");
    long blackCount = 0;
    long whiteCount = 0;

    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }
    int index = 0;
    for (int i = 0; i < mat.rows(); i += 16) {
        for (int j = 0; j < mat.cols(); j += 8) {
            Rect rect = new Rect(new Point(j, i), new Size(8, 16));
            index++;
            System.out.println(rect.toString());
        }
    }
    System.out.println(index);
    ThresholdMat thresholdMat = new ThresholdMat(mat, 0, 255, Imgproc.THRESH_OTSU);
    Mat mat1 = thresholdMat.createMatResult();
    if (blackCount > whiteCount) {
        showImageResult(mat1, "Thresholded image");

        CloseMat openMat = new CloseMat(mat1, Imgproc.MORPH_RECT, 5, 5, 1);
        Mat mat2 = openMat.createMatResult();
        showImageResult(mat2, "Open morphology result");
    } else {

    }
}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public void licensePlateDetection() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    long blackCount = 0;
    long whiteCount = 0;
    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }
    System.out.println("whiteCount: " + whiteCount);
    System.out.println("blackCount: " + blackCount);
    // ImageResult imageResult1 = new ImageResult(mat,
    // "GrayImage1");
    // imageResult1.showResultImage();

    // Step 2
    BilateralFilteringMat bilateralFilteringMat = new BilateralFilteringMat(mat, 75, 75, 1);
    Mat mat2 = bilateralFilteringMat.createMatResult();

    // ImageResult imageResult2 = new ImageResult(mat2,
    // "Gauss");
    // imageResult2.showResultImage();
    // Step 3
    HistogramEqualizationMat histogramEqualizationMat = new HistogramEqualizationMat(mat2);

    Mat mat3 = histogramEqualizationMat.createMatResult();
    // ImageResult imageResult3 = new ImageResult(mat3,
    // "HistogramEqualizationMat");
    // imageResult3.showResultImage();

    // Step 4
    OpenMat openMat = new OpenMat(mat3, Imgproc.MORPH_RECT, 5, 5, 2.2);

    Mat mat4 = openMat.createMatResult();
    // ImageResult imageResult4 = new ImageResult(mat4,
    // "OpenMat");
    // imageResult4.showResultImage();

    // Step 5
    SubtractMat subtractMat = new SubtractMat(mat4, mat3);

    Mat mat5 = subtractMat.createMatResult();
    // ImageResult imageResult5 = new ImageResult(mat5,
    // "SubtractMat");
    // imageResult5.showResultImage();
    //
    // Step 6
    ThresholdMat thresholdMat = new ThresholdMat(mat5, 0, 255, Imgproc.THRESH_OTSU);

    // Mat mat6 = thresholdMat.createMatResult();
    // ImageResult imageResult6 = new ImageResult(mat6,
    // "THRESH_OTSU");
    // imageResult6.showResultImage();

    // //Step 7
    // CannyMat cannyMat = new CannyMat(mat6, 250,
    // 255);
    //
    // Mat mat7 = cannyMat.createMatResult();
    // ImageResult imageResult7 = new ImageResult(mat7,
    // "GrayImage7");
    // imageResult7.showResultImage();
    //
    // //Step 8
    // MorphologyMatBase morphologyMatBase = new MorphologyMatBase(
    // mat7, Imgproc.MORPH_RECT, 3, 3, 1);
    // Mat mat8 = morphologyMatBase.dilate();
    // ImageResult imageResult8 = new ImageResult(mat8,
    // "GrayImage8");
    // imageResult8.showResultImage();
    //
    // //Step 9
    // RectangleDetection rect = new RectangleDetection(mat8);
    // ImageResult imageResult = new
    // ImageResult(rect.executeRectangleDetection(),
    // "GrayImage9");
    // imageResult.showResultImage();

}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

/**
 * I'm working here.
 *
 * @return the list of segmented character images from the detected plate
 */
public List<Mat> processImagePointBlackBiggerThanPointWhite() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    ThresholdMat thresholdMat = new ThresholdMat(mat.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat threshold = thresholdMat.createMatResult();

    MorphologyMatBase closeMat = new MorphologyMatBase(threshold, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat close = closeMat.dilate();

    RectangleDetection rectangleDetection = new RectangleDetection(close);

    List<MatOfPoint> contoursDetectPlate = rectangleDetection.executeRectangleDetection();

    SubMat subMatDetectPlate = new SubMat(mat, contoursDetectPlate);

    // Get plate detected
    List<Mat> detectPlates = subMatDetectPlate.dropImage();
    Mat matDetectPlate = detectPlates.get(5);

    ImageResult imageResult = new ImageResult(matDetectPlate, "Result ");
    imageResult.showResultImage();

    // pre-process
    Mat matResult = new Mat(matDetectPlate.rows() * 2, matDetectPlate.cols() * 2, matDetectPlate.type());
    Imgproc.resize(matDetectPlate, matResult, new Size(matDetectPlate.cols() * 2, matDetectPlate.rows() * 2));

    ThresholdMat thresholdMatDetectPlate = new ThresholdMat(matResult, 0, 255, Imgproc.THRESH_OTSU);

    Mat thresholdMatDetectPlateMat = thresholdMatDetectPlate.createMatResult();

    ImageResult imageResults = new ImageResult(thresholdMatDetectPlateMat, "Result ");
    imageResults.showResultImage();

    CharacterSegment characterSegment = new CharacterSegment(thresholdMatDetectPlateMat.clone());

    List<MatOfPoint> contoursNumber = characterSegment.executeCharacterSegment();

    System.out.println(contoursNumber.size());
    SubMat subMatNumberImg = new SubMat(thresholdMatDetectPlateMat.clone(), contoursNumber);

    List<Mat> listNumberImg = subMatNumberImg.dropImage();

    return listNumberImg;
}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public Map<String, List<Mat>> processImagePointBlackBiggerThanPointWhiteTest() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    ThresholdMat thresholdMat = new ThresholdMat(mat.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat threshold = thresholdMat.createMatResult();

    MorphologyMatBase closeMat = new MorphologyMatBase(threshold, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat close = closeMat.dilate();

    RectangleDetection rectangleDetection = new RectangleDetection(close);

    List<MatOfPoint> contoursDetectPlate = rectangleDetection.executeRectangleDetection();

    SubMat subMatDetectPlate = new SubMat(mat, contoursDetectPlate);

    // Get plate detected
    List<Mat> detectPlates = subMatDetectPlate.dropImage();
    Map<String, List<Mat>> mapNumberImg = new HashMap<String, List<Mat>>();

    for (int i = 0; i < detectPlates.size(); i++) {
        // pre-process
        Mat matDetectPlate = detectPlates.get(i);
        Mat matResult = new Mat(matDetectPlate.rows() * 2, matDetectPlate.cols() * 2, matDetectPlate.type());
        Imgproc.resize(matDetectPlate, matResult,
                new Size(matDetectPlate.cols() * 2, matDetectPlate.rows() * 2));

        ThresholdMat thresholdMatDetectPlate = new ThresholdMat(matResult, 0, 255, Imgproc.THRESH_OTSU);

        Mat thresholdMatDetectPlateMat = thresholdMatDetectPlate.createMatResult();

        CharacterSegment characterSegment = new CharacterSegment(thresholdMatDetectPlateMat.clone());

        List<MatOfPoint> contoursNumber = characterSegment.executeCharacterSegment();

        SubMat subMatNumberImg = new SubMat(thresholdMatDetectPlateMat.clone(), contoursNumber);

        mapNumberImg.put(String.valueOf(i), subMatNumberImg.dropImage());
    }
    return mapNumberImg;
}
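A pitfall worth flagging in the resize steps above: the Mat constructor takes (rows, cols, type) while Size takes (width, height), so the two argument orders are transposed relative to each other. A minimal sketch of a consistent 2x upscale (assuming src is an existing Mat):

// Sketch: double an image's dimensions. Note the transposed orders:
// Mat(rows, cols, type) versus Size(width, height) == Size(cols, rows).
Mat upscaled = new Mat(src.rows() * 2, src.cols() * 2, src.type());
Imgproc.resize(src, upscaled, new Size(src.cols() * 2, src.rows() * 2));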

From source file:org.lasarobotics.vision.detection.ObjectDetection.java

License:Open Source License

/**
 * Analyzes a scene for a target object.
 *
 * @param scene    The scene to be analyzed as a GRAYSCALE matrix
 * @param analysis The target object's analysis from analyzeObject
 * @return A complete scene analysis
 */
public SceneAnalysis analyzeScene(Mat scene, ObjectAnalysis analysis) throws IllegalArgumentException {
    MatOfKeyPoint keypointsScene = new MatOfKeyPoint();

    //DETECT KEYPOINTS in scene
    detector.detect(scene, keypointsScene);

    //EXTRACT KEYPOINT INFO from scene
    Mat descriptorsScene = new Mat();
    extractor.compute(scene, keypointsScene, descriptorsScene);

    if (analysis == null) {
        throw new IllegalArgumentException("Analysis must not be null!");
    }

    if (analysis.descriptors.cols() != descriptorsScene.cols()
            || analysis.descriptors.type() != descriptorsScene.type()) {
        throw new IllegalArgumentException("Object and scene descriptors do not match in cols() or type().");
    }

    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(analysis.descriptors, descriptorsScene, matches);

    //FILTER KEYPOINTS
    /* TODO: filter matches by descriptor distance, e.g. scan
    matches.toList() for the min/max match distance and keep only
    matches within a threshold of the minimum. */

    //STORE SCENE ANALYSIS
    return new SceneAnalysis(keypointsScene, descriptorsScene, matches, scene);
}
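Here cols() serves a different role: descriptor matrices store one descriptor per row, so cols() is the descriptor length. As a sketch, the compatibility check enforced above can stand alone (a hypothetical helper):

// Sketch: two descriptor Mats are only matchable when the descriptor
// length (cols) and element type agree.
private static boolean descriptorsCompatible(Mat a, Mat b) {
    return a.cols() == b.cols() && a.type() == b.type();
}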