Example usage for org.opencv.core Mat submat

List of usage examples for org.opencv.core Mat submat

Introduction

On this page you can find example usage for org.opencv.core Mat submat.

Prototype

public Mat submat(int rowStart, int rowEnd, int colStart, int colEnd) 
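
Before the usage examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class and variable names are illustrative) showing the two properties these examples rely on: the rowEnd and colEnd arguments are exclusive bounds, and the returned Mat is a view that shares pixel data with the parent matrix, which is why several examples below call clone() when they need an independent copy.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class SubmatDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat full = Mat.zeros(100, 200, CvType.CV_8UC1);

        // Rows 10..49 and columns 20..79 (end indices are exclusive): a 40x60 region
        Mat view = full.submat(10, 50, 20, 80); // shares data with 'full', no pixel copy
        view.setTo(new Scalar(255));            // also writes into 'full'

        Mat copy = view.clone();                // independent 40x60 copy of the region
        full.setTo(new Scalar(0));              // clearing the parent...
        System.out.println(view.get(0, 0)[0]);  // 0.0   -- the view follows the parent
        System.out.println(copy.get(0, 0)[0]);  // 255.0 -- the clone is unaffected
    }
}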

Usage

From source file: org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License: Open Source License

public static Mat getPatch(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {

    // make a subMat around the center of the patch
    int minRow = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int maxRow = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int minCol = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int maxCol = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    //  create subMat
    return mat.submat(minRow, maxRow, minCol, maxCol).clone();
}

From source file: org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License: Open Source License

public static ColorDetected getPatchColor(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {

    // make a subMat around the center of the patch
    int minRow = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int maxRow = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int minCol = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int maxCol = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    //  create subMat
    Mat patch = mat.submat(minRow, maxRow, minCol, maxCol);

    // compute the mean color and return it
    return OpenCVUtil.detectStripPatchColor(patch);
}

From source file: org.firstinspires.ftc.teamcode.VuforiaColor.java

public void runOpMode() throws InterruptedException {
    //        frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    //        backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    //        frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    //        backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    //        rollerMotor = hardwareMap.dcMotor.get("rollerMotor");
    //        backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    //        backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
    this.vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    piController = new PIController(.0016, 0.00013, 0.00023, 0.000012);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 1);
    VuforiaTrackables visionTargets = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheelsTarget = visionTargets.get(0);
    wheelsTarget.setName("Wheels"); // Wheels

    VuforiaTrackable toolsTarget = visionTargets.get(1);
    toolsTarget.setName("Tools"); // Tools

    VuforiaTrackable legosTarget = visionTargets.get(2);
    legosTarget.setName("Legos"); // Legos

    VuforiaTrackable gearsTarget = visionTargets.get(3);
    gearsTarget.setName("Gears"); // Gears

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(visionTargets);

    /**
     * We use units of mm here because those are the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotLength = 16 * mmPerInch;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;
    float mmPhoneZOffset = 5.5f * mmPerInch;

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gearsTarget.setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    toolsTarget.setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    /*
     * To place the Wheels and Legos Targets on the Blue Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Finally, we translate it along the Y axis towards the blue audience wall.
     */
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheelsTarget.setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legosTarget.setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (i.e., a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, mmPhoneZOffset)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    0, 180, 0));
    RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) visionTargets.get(0).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(1).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(2).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) visionTargets.get(3).getListener())
            .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    visionTargets.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    telemetry.addData("Loop", "Out");
    telemetry.update();
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take();
        telemetry.update();
        if (frame != null) {
            Image rgb = null;
            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                } //if
            } //for

            if (rgb != null) {
                Bitmap bmp = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                bmp.copyPixelsFromBuffer(rgb.getPixels());

                img = new Mat();
                Utils.bitmapToMat(bmp, img);
                telemetry.addData("Img", "Converted");
                telemetry.update();
            }
        }

        for (VuforiaTrackable beacon : allTrackables) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (upperLeft.getData()[0] - lowerLeft.getData()[0]);
                    int width = (int) (upperRight.getData()[1] - upperLeft.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 1
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 1 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);
                    telemetry.addData(img.rows() + "", img.cols());
                    telemetry.update();

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(130, 150, 255)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(230, 75, 255)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting the button
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", piController.processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file: org.firstinspires.ftc.teamcode.VuforiaMovement.java

public void runOpMode() throws InterruptedException {
    frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    rollerMotor = hardwareMap.dcMotor.get("rollerMotor");

    backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    this.vuforia = new VuforiaLocalizerImplSubclass(parameters);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("Wheels");
    beacons.get(1).setName("Tools");
    beacons.get(2).setName("Legos");
    beacons.get(3).setName("Gears");

    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;

    // Initialize the location of the targets and phone on the field
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(0).setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(1).setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(2).setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(3).setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    RobotLog.ii(TAG, "Phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) beacons.get(0).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(1).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(2).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(3).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    beacons.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    movingToFirstBeacon = false;
    liningUpWithFirstBeacon = false;
    movingToSecondBeacon = false;
    liningUpWithSecondBeacon = false;
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        if (movingToFirstBeacon) {
            // TODO Estimate distance to the beacon from a point TBD
            // TODO Estimate distance to move forward and turn to face the beacon until second movement set
            // Move this outside the loop?

            // Move forward until you see the beacon
            while (movingToFirstBeacon) {
                // Move in increments to minimize the times you check the trackables
                for (int i = 0; i < 50; i++) {
                    frontRightMotor.setPower(1);
                    backRightMotor.setPower(1);
                    frontLeftMotor.setPower(1);
                    backLeftMotor.setPower(1);
                }

                for (VuforiaTrackable beacon : beacons) {
                    // Add beacon to telemetry if visible
                    if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                        visibleTarget = beacon.getName();
                        telemetry.addData(visibleTarget, "Visible");
                    }

                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon
                            .getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                }

                // Move to the beacon until the beacon is in sight
                if (lastLocation != null) {
                    movingToFirstBeacon = false; // Only execute this once
                }
            }
        }

        while (liningUpWithFirstBeacon) {
            for (VuforiaTrackable beacon : beacons) {
                // Add beacon to telemetry if visible
                if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                    visibleTarget = beacon.getName();
                    telemetry.addData(visibleTarget, "Visible");
                }

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                        .getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }

            RobotMovement movement = processLocation(lastLocation, visibleTarget);
            if (movement.isNoMovement()) {
                liningUpWithFirstBeacon = false;
            }

            processMovement(movement);
        }

        if (movingToSecondBeacon) {
            // TODO Estimate the movements/distance from the first beacon to the second
            movingToSecondBeacon = false; // Only execute this once
        }

        if (vuforia.rgb != null && !isButtonHit) {
            Bitmap bmp = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bmp.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            img = new Mat();
            Utils.bitmapToMat(bmp, img);
        }

        for (VuforiaTrackable beacon : beacons) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /** First argument is get(1) if phone is vertical
                 First argument is get(0) if phone is horizontal */
                // DOES NOT WORK???
                degreesToTurn = Math.toDegrees(Math.atan2(translation.get(1), translation.get(2)));
                telemetry.addData("Degrees-", degreesToTurn);
                // TODO Check degree turning threshold
                if (Math.abs(degreesToTurn) > 15) {
                    // Turn after doing calculating transformations
                    needToTurn = true;
                }

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);
                    telemetry.addData(beacon.getName() + "-Degrees", degreesToTurn);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (lowerLeft.getData()[0] - upperLeft.getData()[0]);
                    int width = (int) (upperLeft.getData()[1] - upperRight.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 0
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 0 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit && !needToTurn) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting the button
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}

From source file: org.lasarobotics.vision.detection.objects.Detectable.java

License: Open Source License

/**
 * Gets the average color of the object
 *
 * @param img      The image matrix, of any color size
 * @param imgSpace The image's color space
 * @return The average color of the region
 */
public Color averageColor(Mat img, ColorSpace imgSpace) {
    //Coerce values to stay within screen dimensions
    double leftX = MathUtil.coerce(0, img.cols() - 1, left());
    double rightX = MathUtil.coerce(0, img.cols() - 1, right());

    double topY = MathUtil.coerce(0, img.rows() - 1, top());
    double bottomY = MathUtil.coerce(0, img.rows() - 1, bottom());

    //Input points into array for calculation
    //TODO rectangular submatrix-based calculation isn't perfectly accurate when you have ellipses or weird shapes
    Mat subMat = img.submat((int) topY, (int) bottomY, (int) leftX, (int) rightX);

    //Calculate average and return new color instance
    return Color.create(Core.mean(subMat), imgSpace);
}

From source file: org.lasarobotics.vision.ftc.resq.BeaconAnalyzer.java

License: Open Source License

static Beacon.BeaconAnalysis analyze_FAST(ColorBlobDetector detectorRed, ColorBlobDetector detectorBlue,
        Mat img, Mat gray, ScreenOrientation orientation, Rectangle bounds, boolean debug) {
    //Figure out which way to read the image
    double orientationAngle = orientation.getAngle();
    boolean swapLeftRight = orientationAngle >= 180; //swap if LANDSCAPE_WEST or PORTRAIT_REVERSE
    boolean readOppositeAxis = orientation == ScreenOrientation.PORTRAIT
            || orientation == ScreenOrientation.PORTRAIT_REVERSE; //read other axis if any kind of portrait

    //Bound the image
    if (readOppositeAxis)
        //Force the analysis box to transpose itself in place
        //noinspection SuspiciousNameCombination
        bounds = new Rectangle(new Point(bounds.center().y / img.height() * img.width(),
                bounds.center().x / img.width() * img.height()), bounds.height(), bounds.width())
                        .clip(new Rectangle(img.size()));
    if (!swapLeftRight && readOppositeAxis)
        //Force the analysis box to flip across its primary axis
        bounds = new Rectangle(
                new Point((img.size().width / 2) + Math.abs(bounds.center().x - (img.size().width / 2)),
                        bounds.center().y),
                bounds.width(), bounds.height());
    else if (swapLeftRight && !readOppositeAxis)
        //Force the analysis box to flip across its primary axis
        bounds = new Rectangle(new Point(bounds.center().x, img.size().height - bounds.center().y),
                bounds.width(), bounds.height());
    bounds = bounds.clip(new Rectangle(img.size()));

    //Get contours within the bounds
    detectorRed.process(img);
    detectorBlue.process(img);
    List<Contour> contoursRed = detectorRed.getContours();
    List<Contour> contoursBlue = detectorBlue.getContours();

    //DEBUG Draw contours before filtering
    if (debug)
        Drawing.drawContours(img, contoursRed, new ColorRGBA("#FF0000"), 2);
    if (debug)
        Drawing.drawContours(img, contoursBlue, new ColorRGBA("#0000FF"), 2);

    if (debug)
        Drawing.drawRectangle(img, bounds, new ColorRGBA("#aaaaaa"), 4);

    //Get the largest contour in each - we're calling this one the main light
    int largestIndexRed = findLargestIndexInBounds(contoursRed, bounds);
    int largestIndexBlue = findLargestIndexInBounds(contoursBlue, bounds);
    Contour largestRed = (largestIndexRed != -1) ? contoursRed.get(largestIndexRed) : null;
    Contour largestBlue = (largestIndexBlue != -1) ? contoursBlue.get(largestIndexBlue) : null;

    //If we don't have a main light for one of the colors, we know both colors are the same
    //TODO we should re-filter the contours by size to ensure that we get at least a decent size
    if (largestRed == null && largestBlue == null)
        return new Beacon.BeaconAnalysis();

    //INFO The best contour from each color (if available) is selected as red and blue
    //INFO The two best contours are then used to calculate the location of the beacon

    //If we don't have a main light for one of the colors, we know both colors are the same
    //TODO we should re-filter the contours by size to ensure that we get at least a decent size

    //If the largest part of the non-null color is wider than a certain distance, then both are bright
    //Otherwise, only one may be lit
    //If only one is lit, and is wider than a certain distance, it is bright
    //TODO We are currently assuming that the beacon cannot be in a "bright" state
    if (largestRed == null)
        return new Beacon.BeaconAnalysis();
    else if (largestBlue == null)
        return new Beacon.BeaconAnalysis();

    //The height of the beacon on screen is the height of the best contour
    Contour largestHeight = ((largestRed.size().height) > (largestBlue.size().height)) ? largestRed
            : largestBlue;
    double beaconHeight = largestHeight.size().height;

    //Get beacon width on screen by extrapolating from height
    final double beaconActualHeight = Constants.BEACON_HEIGHT; //cm, only the lit up portion - 14.0 for entire
    final double beaconActualWidth = Constants.BEACON_WIDTH; //cm
    final double beaconWidthHeightRatio = Constants.BEACON_WH_RATIO;
    double beaconWidth = beaconHeight * beaconWidthHeightRatio;
    Size beaconSize = new Size(beaconWidth, beaconHeight);

    //Look at the locations of the largest contours
    //Check to see if the largest red contour is more left-most than the largest right contour
    //If it is, then we know that the left beacon is red and the other blue, and vice versa

    Point bestRedCenter = largestRed.centroid();
    Point bestBlueCenter = largestBlue.centroid();

    //DEBUG R/B text
    if (debug)
        Drawing.drawText(img, "R", bestRedCenter, 1.0f, new ColorRGBA(255, 0, 0));
    if (debug)
        Drawing.drawText(img, "B", bestBlueCenter, 1.0f, new ColorRGBA(0, 0, 255));

    //Test which side is red and blue
    //If the distance between the sides is smaller than a value, then return unknown
    final int minDistance = (int) (Constants.DETECTION_MIN_DISTANCE * beaconSize.width); //percent of beacon width

    boolean leftIsRed;
    Contour leftMostContour, rightMostContour;
    if (readOppositeAxis) {
        if (bestRedCenter.y + minDistance < bestBlueCenter.y) {
            leftIsRed = true;
        } else if (bestBlueCenter.y + minDistance < bestRedCenter.y) {
            leftIsRed = false;
        } else {
            return new Beacon.BeaconAnalysis();
        }
    } else {
        if (bestRedCenter.x + minDistance < bestBlueCenter.x) {
            leftIsRed = true;
        } else if (bestBlueCenter.x + minDistance < bestRedCenter.x) {
            leftIsRed = false;
        } else {
            return new Beacon.BeaconAnalysis();
        }
    }

    //Swap left and right if necessary
    leftIsRed = swapLeftRight != leftIsRed;

    //Get the left-most best contour (or top-most if axis swapped) (or right-most if L/R swapped)
    if (readOppositeAxis) {
        //Get top-most best contour
        leftMostContour = ((largestRed.topLeft().y) < (largestBlue.topLeft().y)) ? largestRed : largestBlue;
        //Get bottom-most best contour
        rightMostContour = ((largestRed.topLeft().y) < (largestBlue.topLeft().y)) ? largestBlue : largestRed;
    } else {
        //Get left-most best contour
        leftMostContour = ((largestRed.topLeft().x) < (largestBlue.topLeft().x)) ? largestRed : largestBlue;
        //Get the right-most best contour
        rightMostContour = ((largestRed.topLeft().x) < (largestBlue.topLeft().x)) ? largestBlue : largestRed;
    }

    //DEBUG Logging
    if (debug)
        Log.d("Beacon", "Orientation: " + orientation + "Angle: " + orientationAngle + " Swap Axis: "
                + readOppositeAxis + " Swap Direction: " + swapLeftRight);

    //Swap left and right if necessary
    //BUGFIX: invert when we swap
    if (!swapLeftRight) {
        Contour temp = leftMostContour;
        leftMostContour = rightMostContour;
        rightMostContour = temp;
    }

    //Now that we have the two contours, let's find ellipses that match

    //Draw the box surrounding both contours
    //Get the width of the contours
    double widthBeacon = rightMostContour.right() - leftMostContour.left();

    //Center of contours is the average of centroids of the contours
    Point center = new Point((leftMostContour.centroid().x + rightMostContour.centroid().x) / 2,
            (leftMostContour.centroid().y + rightMostContour.centroid().y) / 2);

    //Get the combined height of the contours
    double heightContours = Math.max(leftMostContour.bottom(), rightMostContour.bottom())
            - Math.min(leftMostContour.top(), rightMostContour.top());

    //The largest size ratio of tested over actual is the scale ratio
    double scale = Math.max(widthBeacon / beaconActualWidth, heightContours / beaconActualHeight);

    //Define size of bounding box by scaling the actual beacon size
    Size beaconSizeFinal = new Size(beaconActualWidth * scale, beaconActualHeight * scale);

    //Swap x and y if we rotated the view
    if (readOppositeAxis) {
        //noinspection SuspiciousNameCombination
        beaconSizeFinal = new Size(beaconSizeFinal.height, beaconSizeFinal.width);
    }

    //Get points of the bounding box
    Point beaconTopLeft = new Point(center.x - (beaconSizeFinal.width / 2),
            center.y - (beaconSizeFinal.height / 2));
    Point beaconBottomRight = new Point(center.x + (beaconSizeFinal.width / 2),
            center.y + (beaconSizeFinal.height / 2));
    Rectangle boundingBox = new Rectangle(new Rect(beaconTopLeft, beaconBottomRight));

    //Get ellipses in region of interest
    //Make sure the rectangles don't leave the image size
    Rectangle leftRect = leftMostContour.getBoundingRectangle().clip(new Rectangle(img.size()));
    Rectangle rightRect = rightMostContour.getBoundingRectangle().clip(new Rectangle(img.size()));
    Mat leftContourImg = gray.submat((int) leftRect.top(), (int) leftRect.bottom(), (int) leftRect.left(),
            (int) leftRect.right());
    Mat rightContourImg = gray.submat((int) rightRect.top(), (int) rightRect.bottom(), (int) rightRect.left(),
            (int) rightRect.right());

    //Locate ellipses in the image to process contours against
    List<Ellipse> ellipsesLeft = PrimitiveDetection.locateEllipses(leftContourImg).getEllipses();
    Detectable.offset(ellipsesLeft, new Point(leftRect.left(), leftRect.top()));
    List<Ellipse> ellipsesRight = PrimitiveDetection.locateEllipses(rightContourImg).getEllipses();
    Detectable.offset(ellipsesRight, new Point(rightRect.left(), rightRect.top()));

    //Score ellipses
    BeaconScoringCOMPLEX scorer = new BeaconScoringCOMPLEX(img.size());
    List<BeaconScoringCOMPLEX.ScoredEllipse> scoredEllipsesLeft = scorer.scoreEllipses(ellipsesLeft, null, null,
            gray);
    scoredEllipsesLeft = filterEllipses(scoredEllipsesLeft);
    ellipsesLeft = BeaconScoringCOMPLEX.ScoredEllipse.getList(scoredEllipsesLeft);
    if (debug)
        Drawing.drawEllipses(img, ellipsesLeft, new ColorRGBA("#00ff00"), 3);
    List<BeaconScoringCOMPLEX.ScoredEllipse> scoredEllipsesRight = scorer.scoreEllipses(ellipsesRight, null,
            null, gray);
    scoredEllipsesRight = filterEllipses(scoredEllipsesRight);
    ellipsesRight = BeaconScoringCOMPLEX.ScoredEllipse.getList(scoredEllipsesRight);
    if (debug)
        Drawing.drawEllipses(img, ellipsesRight, new ColorRGBA("#00ff00"), 3);

    //Calculate ellipse center if present
    Point centerLeft;
    Point centerRight;
    boolean done = false;
    do {
        centerLeft = null;
        centerRight = null;
        if (scoredEllipsesLeft.size() > 0)
            centerLeft = scoredEllipsesLeft.get(0).ellipse.center();
        if (scoredEllipsesRight.size() > 0)
            centerRight = scoredEllipsesRight.get(0).ellipse.center();

        //Flip axis if necessary
        if (centerLeft != null && readOppositeAxis) {
            centerLeft.set(new double[] { centerLeft.y, centerLeft.x });
        }
        if (centerRight != null && readOppositeAxis) {
            centerRight.set(new double[] { centerRight.y, centerRight.x });
        }

        //Make very, very sure that we didn't just find the same ellipse
        if (centerLeft != null && centerRight != null) {
            if (Math.abs(centerLeft.x - centerRight.x) < Constants.ELLIPSE_MIN_DISTANCE * beaconSize.width) {
                //Are both ellipses on the left or right side of the beacon? - remove the opposite side's ellipse
                if (Math.abs(centerLeft.x - leftRect.center().x) < Constants.ELLIPSE_MIN_DISTANCE
                        * beaconSize.width)
                    scoredEllipsesRight.remove(0);
                else
                    scoredEllipsesLeft.remove(0);
            } else
                done = true;
        } else
            done = true;

    } while (!done);

    //Improve the beacon center if both ellipses present
    byte ellipseExtrapolated = 0;
    if (centerLeft != null && centerRight != null) {
        if (readOppositeAxis)
            center.y = (centerLeft.x + centerRight.x) / 2;
        else
            center.x = (centerLeft.x + centerRight.x) / 2;
    }
    //Extrapolate other ellipse location if one present
    //FIXME: This method of extrapolation may not work when readOppositeAxis is true
    else if (centerLeft != null) {
        ellipseExtrapolated = 2;
        if (readOppositeAxis)
            centerRight = new Point(centerLeft.x - 2 * Math.abs(center.x - centerLeft.x),
                    (centerLeft.y + center.y) / 2);
        else
            centerRight = new Point(centerLeft.x + 2 * Math.abs(center.x - centerLeft.x),
                    (centerLeft.y + center.y) / 2);
        if (readOppositeAxis)
            centerRight.set(new double[] { centerRight.y, centerRight.x });
    } else if (centerRight != null) {
        ellipseExtrapolated = 1;
        if (readOppositeAxis)
            centerLeft = new Point(centerRight.x + 2 * Math.abs(center.x - centerRight.x),
                    (centerRight.y + center.y) / 2);
        else
            centerLeft = new Point(centerRight.x - 2 * Math.abs(center.x - centerRight.x),
                    (centerRight.y + center.y) / 2);
        if (readOppositeAxis)
            centerLeft.set(new double[] { centerLeft.y, centerLeft.x });
    }

    //Draw center locations
    if (debug)
        Drawing.drawCross(img, center, new ColorRGBA("#ffffff"), 10, 4);
    if (debug && centerLeft != null) {
        ColorRGBA c = ellipseExtrapolated != 1 ? new ColorRGBA("#ffff00") : new ColorRGBA("#ff00ff");
        //noinspection SuspiciousNameCombination
        Drawing.drawCross(img, !readOppositeAxis ? centerLeft : new Point(centerLeft.y, centerLeft.x), c, 8, 3);
    }
    if (debug && centerRight != null) {
        ColorRGBA c = ellipseExtrapolated != 2 ? new ColorRGBA("#ffff00") : new ColorRGBA("#ff00ff");
        //noinspection SuspiciousNameCombination
        Drawing.drawCross(img, !readOppositeAxis ? centerRight : new Point(centerRight.y, centerRight.x), c, 8,
                3);
    }

    //Draw the rectangle containing the beacon
    if (debug)
        Drawing.drawRectangle(img, boundingBox, new ColorRGBA(0, 255, 0), 4);

    //Tell us the height of the beacon
    //TODO later we can get the distance away from the beacon based on its height and position

    //Remove the largest index and look for the next largest
    //If the next largest is (mostly) within the area of the box, then merge it in with the largest
    //Check if the size of the largest contour(s) is about twice the size of the other
    //This would indicate one is brightly lit and the other is not
    //If this is not true, then neither part of the beacon is highly lit

    //Get confidence approximation
    double WH_ratio = widthBeacon / beaconHeight;
    double ratioError = Math.abs((Constants.BEACON_WH_RATIO - WH_ratio)) / Constants.BEACON_WH_RATIO; // perfect value = 0;
    double averageHeight = (leftMostContour.height() + rightMostContour.height()) / 2.0;
    double dy = Math.abs((leftMostContour.centroid().y - rightMostContour.centroid().y) / averageHeight
            * Constants.FAST_HEIGHT_DELTA_FACTOR);
    double dArea = Math.sqrt(leftMostContour.area() / rightMostContour.area());
    double buttonsdy = (centerLeft != null && centerRight != null)
            ? (Math.abs(centerLeft.y - centerRight.y) / averageHeight * Constants.FAST_HEIGHT_DELTA_FACTOR)
            : Constants.ELLIPSE_PRESENCE_BIAS;
    double confidence = MathUtil.normalPDFNormalized(
            MathUtil.distance(MathUtil.distance(MathUtil.distance(ratioError, dy), dArea), buttonsdy)
                    / Constants.FAST_CONFIDENCE_ROUNDNESS,
            Constants.FAST_CONFIDENCE_NORM, 0.0);

    //Get button ellipses
    Ellipse leftEllipse = scoredEllipsesLeft.size() > 0 ? scoredEllipsesLeft.get(0).ellipse : null;
    Ellipse rightEllipse = scoredEllipsesRight.size() > 0 ? scoredEllipsesRight.get(0).ellipse : null;

    //Test for color switching
    if (leftEllipse != null && rightEllipse != null && leftEllipse.center().x > rightEllipse.center().x) {
        Ellipse tE = leftEllipse;
        leftEllipse = rightEllipse;
        rightEllipse = tE;
    } else if ((leftEllipse != null && leftEllipse.center().x > center.x)
            || (rightEllipse != null && rightEllipse.center().x < center.x)) {
        Ellipse tE = leftEllipse;
        leftEllipse = rightEllipse;
        rightEllipse = tE;
    }

    //Axis correction for ellipses
    if (swapLeftRight) {
        if (leftEllipse != null)
            leftEllipse = new Ellipse(
                    new RotatedRect(new Point(img.width() - leftEllipse.center().x, leftEllipse.center().y),
                            leftEllipse.size(), leftEllipse.angle()));
        if (rightEllipse != null)
            rightEllipse = new Ellipse(
                    new RotatedRect(new Point(img.width() - rightEllipse.center().x, rightEllipse.center().y),
                            rightEllipse.size(), rightEllipse.angle()));
        //Swap again after correcting axis to ensure left is left and right is right
        Ellipse tE = leftEllipse;
        leftEllipse = rightEllipse;
        rightEllipse = tE;
    }

    //Switch axis if necessary
    if (readOppositeAxis)
        boundingBox = boundingBox.transpose();

    if (leftIsRed)
        return new Beacon.BeaconAnalysis(Beacon.BeaconColor.RED, Beacon.BeaconColor.BLUE, boundingBox,
                confidence, leftEllipse, rightEllipse);
    else
        return new Beacon.BeaconAnalysis(Beacon.BeaconColor.BLUE, Beacon.BeaconColor.RED, boundingBox,
                confidence, leftEllipse, rightEllipse);
}

From source file: servershootingstar.BallDetector.java

public static String getAngleFromRobot(int input) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    System.out.println("before");
    int point;
    try {
        Mat frame = new Mat();
        System.out.println("AAAAAA");
        Mat originalFrame = new Mat();
        System.out.println("BBBBBB");
        VideoCapture videoCapture = new VideoCapture(0);
        System.out.println("CCCCCCCC");
        videoCapture.read(originalFrame);
        //                System.out.println("original" + originalFrame.dump());
        //                initSwing(originalFrame);
        int workaround = 20;
        while (workaround > 0) {
            System.out.println("workaround " + workaround);
            videoCapture.read(originalFrame);
            //                    System.out.println(originalFrame.dump() + originalFrame.dump().length());
            workaround--;
        }
        //                Imgcodecs.imwrite("C:\\Users\\Goran\\Desktop\\Goran.jpg", originalFrame);
        Mat cropped = originalFrame.submat(originalFrame.rows() / 4, originalFrame.rows() / 4 * 3, 0,
                originalFrame.cols());
        initSwing(cropped);
        Imgproc.cvtColor(cropped, frame, Imgproc.COLOR_BGR2HSV);

        // insert lower and upper bounds for colors
        Scalar greenLowerB = new Scalar(20, 55, 55);
        Scalar greenUpperB = new Scalar(40, 255, 255);

        Scalar redLowerB = new Scalar(160, 100, 35);
        Scalar red1LowerB = new Scalar(0, 100, 35);

        Scalar redUpperB = new Scalar(180, 255, 255);
        Scalar red1UpperB = new Scalar(20, 255, 255);

        Scalar blueLowerB = new Scalar(100, 100, 35);
        Scalar blueUpperB = new Scalar(120, 255, 155);

        Mat mask = new Mat();

        if (input == 1) {
            Mat otherMask = new Mat();
            Core.inRange(frame, redLowerB, redUpperB, mask);
            Core.inRange(frame, red1LowerB, red1UpperB, otherMask);
            Core.bitwise_or(mask, otherMask, mask);
        } else if (input == 2) {
            Core.inRange(frame, greenLowerB, greenUpperB, mask);
        } else {
            Core.inRange(frame, blueLowerB, blueUpperB, mask);
        }
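        // erode four times with a 5x5 elliptical kernel to strip small noise specks from the mask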
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));
        Imgproc.erode(mask, mask, Imgproc.getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(5, 5)));

        int minX = Integer.MAX_VALUE, maxX = Integer.MIN_VALUE, minY = Integer.MAX_VALUE,
                maxY = Integer.MIN_VALUE;
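        // scan the mask for lit pixels and record their bounding box;
        // note that i/minX/maxX index rows (y) and j/minY/maxY index columns (x)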
        for (int i = 0; i < mask.rows(); ++i) {
            for (int j = 0; j < mask.cols(); ++j) {
                double value = mask.get(i, j)[0];
                //System.out.println(value);
                if (value > 1) {
                    minX = Math.min(minX, i);
                    maxX = Math.max(maxX, i);
                    minY = Math.min(minY, j);
                    maxY = Math.max(maxY, j);
                }
            }
        }

        Imgproc.circle(mask, new Point((maxY + minY) / 2, (minX + maxX) / 2), 3, new Scalar(0, 0, 0));
        initSwing(mask);

        point = (minY + maxY) / 2;

        point = point - 320;

        cos = point / 320.0;
        System.out.println("OK");
    } catch (Exception ex) {
        point = (new Random()).nextInt(640);
        cos = -1;
        System.out.println("error imase, davam random brojka: " + point);
        ex.printStackTrace();

    }

    //            System.out.println();
    //            System.out.println("tockata u granica od [-320, 320]");
    //            System.out.println(point);
    //            System.out.println("cosinus vrednost");
    //            System.out.println(cos);
    //            System.out.println();
    System.out.println("cos = " + cos);
    if (cos == -1) {
        return "-1";
    }
    int res = (int) (2 * Math.toDegrees(Math.acos(cos)) / 3);
    System.out.println("Res: " + res);
    return String.valueOf(res);
}
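
The key submat call above is originalFrame.submat(rows / 4, rows / 4 * 3, 0, cols), which keeps only the middle half of the rows before colour thresholding. A minimal sketch of that cropping step in isolation (assuming the OpenCV native library is loaded and org.opencv.core.Mat is imported; cropMiddleRows is a hypothetical helper):

    static Mat cropMiddleRows(Mat frame) {
        // rowStart/colStart are inclusive, rowEnd/colEnd are exclusive
        Mat band = frame.submat(frame.rows() / 4, frame.rows() / 4 * 3, 0, frame.cols());
        // submat returns a view that shares data with frame; clone() gives an independent copy
        return band.clone();
    }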

From source file:syncleus.dann.data.video.TLDView.java

License:Apache License

@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // useful to see what we're actually working with...
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                System.out.println("Working Ration: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again !
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);

                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));

                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image (needs converting at the same time!)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(TLDUtil.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Core.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}
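
Here submat is used as a destination: the returned Mat is a view sharing data with originalFrame, so copying the scaled-down working frame into it paints the thumbnail directly onto the full-size frame. A minimal sketch of that pattern (hedged: overlayThumbnail is a hypothetical helper, and thumb must have the same type as frame and fit inside it):

    static void overlayThumbnail(Mat frame, Mat thumb) {
        // bottom-left corner of frame, sized to match thumb
        Mat roi = frame.submat(frame.rows() - thumb.rows(), frame.rows(), 0, thumb.cols());
        thumb.copyTo(roi); // writes through the view into frame
    }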

From source file:uk.ac.horizon.aestheticodes.controllers.MatTranform.java

License:Open Source License

static Mat crop(Mat imgMat) {
    final int size = Math.min(imgMat.rows(), imgMat.cols());

    final int colStart = (imgMat.cols() - size) / 2;
    final int rowStart = (imgMat.rows() - size) / 2;

    return imgMat.submat(rowStart, rowStart + size, colStart, colStart + size);
}
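
A minimal usage sketch (assumption: the caller can see this package-private method, and cameraFrame is a hypothetical input Mat). The centre square returned by crop is a view into imgMat, so clone() it if the original frame will be modified or released afterwards:

    Mat square = MatTranform.crop(cameraFrame); // view sharing pixel data with cameraFrame
    Mat standalone = square.clone();            // independent copy, safe to keep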

From source file:uk.ac.horizon.artcodes.process.OtsuThresholder.java

License:Open Source License

@Override
public void process(ImageBuffers buffers) {
    Mat image = buffers.getImageInGrey();
    Imgproc.GaussianBlur(image, image, new Size(5, 5), 0);

    if (display == Display.greyscale) {
        Imgproc.cvtColor(image, buffers.getOverlay(false), Imgproc.COLOR_GRAY2BGRA);
    }

    tiles = 1;
    final int tileHeight = (int) image.size().height / tiles;
    final int tileWidth = (int) image.size().width / tiles;

    // Split image into tiles and apply process on each image tile separately.
    for (int tileRow = 0; tileRow < tiles; tileRow++) {
        final int startRow = tileRow * tileHeight;
        int endRow;
        if (tileRow < tiles - 1) {
            endRow = (tileRow + 1) * tileHeight;
        } else {
            endRow = (int) image.size().height;
        }

        for (int tileCol = 0; tileCol < tiles; tileCol++) {
            final int startCol = tileCol * tileWidth;
            int endCol;
            if (tileCol < tiles - 1) {
                endCol = (tileCol + 1) * tileWidth;
            } else {
                endCol = (int) image.size().width;
            }

            final Mat tileMat = image.submat(startRow, endRow, startCol, endCol);
            Imgproc.threshold(tileMat, tileMat, 127, 255, Imgproc.THRESH_OTSU);
            tileMat.release();
        }
    }

    if (display == Display.threshold) {
        Imgproc.cvtColor(image, buffers.getOverlay(false), Imgproc.COLOR_GRAY2BGRA);
    }

    buffers.setImage(image);
}
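
With tiles hard-coded to 1 the loop above amounts to a single global Otsu pass, but the same structure generalises: each tile obtained via submat is a view, so thresholding it in place writes straight back into the full grey image. A minimal sketch of the tiled variant (hedged: thresholdTiles is a hypothetical helper; grey is assumed to be a single-channel 8-bit Mat):

    static void thresholdTiles(Mat grey, int tiles) {
        final int tileHeight = grey.rows() / tiles;
        final int tileWidth = grey.cols() / tiles;
        for (int row = 0; row < tiles; row++) {
            final int endRow = (row == tiles - 1) ? grey.rows() : (row + 1) * tileHeight;
            for (int col = 0; col < tiles; col++) {
                final int endCol = (col == tiles - 1) ? grey.cols() : (col + 1) * tileWidth;
                final Mat tile = grey.submat(row * tileHeight, endRow, col * tileWidth, endCol);
                Imgproc.threshold(tile, tile, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
                tile.release(); // releases only the view header; the thresholded pixels stay in grey
            }
        }
    }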