Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

This page collects example usages of the org.opencv.core Mat constructor. The examples below exercise several Mat constructor overloads, not only the prototype shown.

Prototype

public Mat(Mat m, Range rowRange, Range colRange) 
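
The prototype above is the sub-matrix constructor: it builds a new Mat header over the region of m selected by rowRange and colRange, without copying pixel data. A minimal sketch, assuming the OpenCV native library is already loaded (the method name and the chosen ranges are illustrative only):

public static Mat cropCenter(Mat src) {
    // org.opencv.core.Range is end-exclusive: [start, end)
    Range rowRange = new Range(src.rows() / 4, 3 * src.rows() / 4);
    Range colRange = new Range(src.cols() / 4, 3 * src.cols() / 4);
    // the returned Mat shares pixel data with src; call clone() if a deep copy is needed
    return new Mat(src, rowRange, colRange);
}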

Usage

From source file:classes.Util.java

public static Mat img2Mat(BufferedImage in) {

    int width = in.getWidth();
    int height = in.getHeight();

    Mat out;
    byte[] data;
    int r, g, b;

    if (in.getType() == BufferedImage.TYPE_INT_RGB) {
        out = new Mat(height, width, CvType.CV_8UC3);
        data = new byte[width * height * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            data[i * 3] = (byte) ((dataBuff[i] >> 16) & 0xFF);
            data[i * 3 + 1] = (byte) ((dataBuff[i] >> 8) & 0xFF);
            data[i * 3 + 2] = (byte) ((dataBuff[i] >> 0) & 0xFF);
        }
    } else {
        out = new Mat(height, width, CvType.CV_8UC1);
        data = new byte[width * height * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            // keep r, g, b in 0..255; an intermediate byte cast would turn values above 127 negative
            r = (dataBuff[i] >> 16) & 0xFF;
            g = (dataBuff[i] >> 8) & 0xFF;
            b = (dataBuff[i] >> 0) & 0xFF;
            data[i] = (byte) ((0.21 * r) + (0.71 * g) + (0.07 * b)); //luminosity
        }
    }
    out.put(0, 0, data);
    return out;
}
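
A short usage sketch for img2Mat (the file name is illustrative; OpenCV's native library must be loaded, for example via System.loadLibrary(Core.NATIVE_LIBRARY_NAME), before any Mat is created). Note that only BufferedImage.TYPE_INT_RGB takes the colour branch above; any other image type falls through to the grayscale branch:

BufferedImage img = ImageIO.read(new File("input.png")); // javax.imageio.ImageIO
Mat mat = Util.img2Mat(img);
System.out.println("Converted Mat: " + mat.rows() + "x" + mat.cols() + ", channels=" + mat.channels());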

From source file:club.towr5291.Concepts.ConceptVuforiaOpGrabImage.java

@Override
public void runOpMode() throws InterruptedException {

    //load variables
    sharedPreferences = PreferenceManager.getDefaultSharedPreferences(hardwareMap.appContext);
    teamNumber = sharedPreferences.getString("club.towr5291.Autonomous.TeamNumber", "0000");
    allianceColor = sharedPreferences.getString("club.towr5291.Autonomous.Color", "Red");
    allianceStartPosition = sharedPreferences.getString("club.towr5291.Autonomous.StartPosition", "Left");
    allianceParkPosition = sharedPreferences.getString("club.towr5291.Autonomous.ParkPosition", "Vortex");
    delay = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Delay", "0"));
    numBeacons = sharedPreferences.getString("club.towr5291.Autonomous.Beacons", "One");
    robotConfig = sharedPreferences.getString("club.towr5291.Autonomous.RobotConfig", "TileRunner-2x40");
    debug = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Debug", "1")); // default "1" avoids NumberFormatException when the preference is unset

    debug = 3;

    //start the log
    startDate = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(new Date());
    fileLogger = new FileLogger(runtime);
    fileLogger.open();
    fileLogger.write("Time,SysMS,Thread,Event,Desc");
    fileLogger.writeEvent("init()", "Log Started");

    //BeaconAnalysisOCV beaconColour = new BeaconAnalysisOCV();
    //BeaconAnalysisOCV2 beaconColour = new BeaconAnalysisOCV2();
    //BeaconAnalysisOCVAnalyse beaconColour = new BeaconAnalysisOCVAnalyse();
    BeaconAnalysisOCVPlayground beaconColour = new BeaconAnalysisOCVPlayground();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    parameters.vuforiaLicenseKey = "AVATY7T/////AAAAGQJxfNYzLUgGjSx0aOEU0Q0rpcfZO2h2sY1MhUZUr+Bu6RgoUMUP/nERGmD87ybv1/lM2LBFDxcBGRHkXvxtkHel4XEUCsNHFTGWYcVkMIZqctQsIrTe13MnUvSOfQj8ig7xw3iULcwDpY+xAftW61dKTJ0IAOCxx2F0QjJWqRJBxrEUR/DfQi4LyrgnciNMXCiZ8KFyBdC63XMYkQj2joTN579+2u5f8aSCe8jkAFnBLcB1slyaU9lhnlTEMcFjwrLBcWoYIFAZluvFT0LpqZRlS1/XYf45QBSJztFKHIsj1rbCgotAE36novnAQBs74ewnWsJifokJGOYWdFJveWzn3GE9OEH23Y5l7kFDu4wc";
    //parameters.cameraMonitorFeedback = VuforiaLocalizer.Parameters.CameraMonitorFeedback.AXES;

    VuforiaLocalizer vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    //ConceptVuforiaGrabImage vuforia = new ConceptVuforiaGrabImage(parameters);
    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);

    VuforiaTrackables velocityVortex = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheels = velocityVortex.get(0);
    wheels.setName("wheels"); // wheels target

    VuforiaTrackable tools = velocityVortex.get(1);
    tools.setName("tools"); // tools target

    VuforiaTrackable legos = velocityVortex.get(2);
    legos.setName("legos"); // legos target

    VuforiaTrackable gears = velocityVortex.get(3);
    gears.setName("gears"); // gears target

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(velocityVortex);

    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels

    /**
     * In order for localization to work, we need to tell the system where each target we
     * wish to use for navigation resides on the field, and we need to specify where on the robot
     * the phone resides. These specifications are in the form of <em>transformation matrices.</em>
     * Transformation matrices are a central, important concept in the math here involved in localization.
     * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
     * for detailed information. Commonly, you'll encounter transformation matrices as instances
     * of the {@link OpenGLMatrix} class.
     *
     * For the most part, you don't need to understand the details of the math of how transformation
     * matrices work inside (as fascinating as that is, truly). Just remember these key points:
     * <ol>
     *
     *     <li>You can put two transformations together to produce a third that combines the effect of
     *     both of them. If, for example, you have a rotation transform R and a translation transform T,
     *     then the combined transformation matrix RT which does the rotation first and then the translation
     *     is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
     *     <em>reverse</em> of the chronological order in which they are applied.</li>
     *
     *     <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
     *     class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
     *     float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
     *     {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
     *     Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
     *     float, float, float, float)}, are syntactic shorthands for creating a new transform and
     *     then immediately multiplying the receiver by it, which can be convenient at times.</li>
     *
     *     <li>If you want to break open the black box of a transformation matrix to understand
     *     what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
     *     transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
     *     AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
     *     will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
     *
     * </ol>
     *
     * This example places the "stones" image on the perimeter wall to the Left
     *  of the Red Driver station wall.  Similar to the Red Beacon Location on the Res-Q
     *
     * This example places the "chips" image on the perimeter wall to the Right
     *  of the Blue Driver station.  Similar to the Blue Beacon Location on the Res-Q
     *
     * See the doc folder of this project for a description of the field Axis conventions.
     *
     * Initially the target is conceptually lying at the origin of the field's coordinate system
     * (the center of the field), facing up.
     *
     * In this configuration, the target's coordinate system aligns with that of the field.
     *
     * In a real situation we'd also account for the vertical (Z) offset of the target,
     * but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
     *
     * To place the Wheels Target on the Red Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Then we rotate it 90 around the field's Z axis to face it away from the audience.
     * - Finally, we translate it back along the X axis towards the red audience wall.
     *
     */

    // RED Targets
    // To Place GEARS Target
    // position is approximately - (-6feet, -1feet)

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -1 * 12 * mmPerInch, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gears.setLocation(gearsTargetLocationOnField);
    //RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    // To Place TOOLS Target
    // position is approximately - (-6feet, 3feet)
    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X with a positive offset in Y.*/
            .translation(-mmFTCFieldWidth / 2, 3 * 12 * mmPerInch, 0)
            //.translation(0, mmFTCFieldWidth/2, 0)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    tools.setLocation(toolsTargetLocationOnField);
    //RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    //Finish RED Targets

    // BLUE Targets
    // To Place LEGOS Target
    // position is approximately - (-3feet, 6feet)

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the BLUE WALL. Our translation here
            is a positive translation in Y.*/
            .translation(-3 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legos.setLocation(legosTargetLocationOnField);
    //RobotLog.ii(TAG, "Gears Target=%s", format(legosTargetLocationOnField));

    // To Place WHEELS Target
    // position is approximately - (1feet, 6feet)
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(1 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheels.setLocation(wheelsTargetLocationOnField);
    //RobotLog.ii(TAG, "Tools Target=%s", format(wheelsTargetLocationOnField));

    //Finish BLUE Targets

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation((mmBotWidth / 2), 50, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    //RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    /**
     * Let the trackable listeners we care about know where the phone is. We know that each
     * listener is a {@link VuforiaTrackableDefaultListener} and can so safely cast because
     * we have not ourselves installed a listener of a different type.
     */
    ((VuforiaTrackableDefaultListener) gears.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) tools.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) legos.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) wheels.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /**
     * A brief tutorial: here's how all the math is going to work:
     *
     * C = phoneLocationOnRobot  maps   phone coords -> robot coords
     * P = tracker.getPose()     maps   image target coords -> phone coords
     * L = redTargetLocationOnField maps   image target coords -> field coords
     *
     * So
     *
     * C.inverted()              maps   robot coords -> phone coords
     * P.inverted()              maps   phone coords -> imageTarget coords
     *
     * Putting that all together,
     *
     * L x P.inverted() x C.inverted() maps robot coords to field coords.
     *
     * @see VuforiaTrackableDefaultListener#getRobotLocation()
     */
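
    // A hedged illustration of the composition described above (not part of the original op mode):
    // with C = phoneLocationOnRobot, P = listener.getPose() for a visible target, and
    // L = that target's location on the field (e.g. gearsTargetLocationOnField), the robot's
    // field location could be computed by hand as
    //     OpenGLMatrix robotOnField = L.multiplied(P.inverted()).multiplied(C.inverted());
    // which is the same composition the getUpdatedRobotLocation() calls in the loop below rely on.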

    waitForStart();

    //Mat tmp = new Mat();

    velocityVortex.activate();

    Image rgb = null;

    int loop = 0;
    Constants.BeaconColours Colour = Constants.BeaconColours.UNKNOWN;

    while (opModeIsActive()) {

        boolean gotBeacomDims = false;
        Point beaconBotRight = new Point(0, 0);
        Point beaconTopLeft = new Point(0, 0);
        Point beaconMiddle = new Point(0, 0);

        for (VuforiaTrackable beac : velocityVortex) {

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

            if (pose != null) {

                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));

                double dblMidPointTopx = (upperRight.getData()[0] + upperLeft.getData()[0]) / 2;
                double dblMidPointTopy = (upperRight.getData()[1] + upperLeft.getData()[1]) / 2;
                double dblMidPointBotx = (lowerRight.getData()[0] + lowerLeft.getData()[0]) / 2;
                double dblMidPointBoty = (lowerRight.getData()[1] + lowerLeft.getData()[1]) / 2;

                double width = Math.sqrt((Math.pow((upperRight.getData()[1] - upperLeft.getData()[1]), 2))
                        + (Math.pow((upperRight.getData()[0] - upperLeft.getData()[0]), 2)));
                double height = Math.sqrt((Math.pow((dblMidPointTopy - dblMidPointBoty), 2))
                        + (Math.pow((dblMidPointTopx - dblMidPointBotx), 2)));

                //width is equal to 254 mm, so width of beacon is 220mm, height of beacon is 150mm
                //pixels per mm width, using a known size of the target
                double dblWidthPixelsPermm = width / TARGET_WIDTH;
                double dblHeightPixelsPermm = height / TARGET_HEIGHT;

                //beacon base is about 25mm above top of target
                beaconBotRight = new Point((dblMidPointTopx + (110 * dblWidthPixelsPermm)),
                        dblMidPointTopy - (30 * dblHeightPixelsPermm));
                beaconTopLeft = new Point((dblMidPointTopx - (110 * dblWidthPixelsPermm)),
                        dblMidPointTopy - (160 * dblHeightPixelsPermm));

                beaconMiddle.x = dblMidPointTopx;
                beaconMiddle.y = dblMidPointTopy + (105 * dblHeightPixelsPermm);

                gotBeacomDims = true;

                if (debug >= 1) {
                    fileLogger.writeEvent("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);
                    Log.d("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                    Log.d("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "upperRight 1 " + upperRight.getData()[1]);
                    Log.d("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                    Log.d("Vuforia", "upperRight 1 " + upperRight.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);
                    Log.d("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                    Log.d("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                    fileLogger.writeEvent("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);
                    Log.d("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                    Log.d("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);

                    fileLogger.writeEvent("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                    fileLogger.writeEvent("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                    fileLogger.writeEvent("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                    fileLogger.writeEvent("Vuforia", "dblMidPointBoty " + dblMidPointBoty);
                    Log.d("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                    Log.d("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                    Log.d("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                    Log.d("Vuforia", "dblMidPointBoty " + dblMidPointBoty);

                    fileLogger.writeEvent("Vuforia", "width in pixels " + width);
                    fileLogger.writeEvent("Vuforia", "height in pixels " + height);
                    Log.d("Vuforia", "width in pixels " + width);
                    Log.d("Vuforia", "height in pixels " + height);
                }
            }
        }

        if (gotBeacomDims) {

            VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take(); //takes the frame at the head of the queue

            long numImages = frame.getNumImages();

            for (int i = 0; i < numImages; i++) {
                if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                    rgb = frame.getImage(i);
                    break;
                }
            }

            /* rgb is now the RGB565 Image object taken from the frame */
            Log.d("OPENCV", "Height " + rgb.getHeight() + " Width " + rgb.getWidth());

            Bitmap bm = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(rgb.getPixels());
            Mat tmp = new Mat(rgb.getHeight(), rgb.getWidth(), CvType.CV_8UC4); // Mat takes (rows, cols, type); Utils.bitmapToMat below resizes it to match the bitmap
            Utils.bitmapToMat(bm, tmp);

            if (beaconTopLeft.x < 0)
                beaconTopLeft.x = 0;
            if (beaconTopLeft.y < 0)
                beaconTopLeft.y = 0;
            if (beaconBotRight.x > rgb.getWidth())
                beaconBotRight.x = rgb.getWidth();
            if (beaconBotRight.y > rgb.getHeight())
                beaconBotRight.y = rgb.getHeight();

            frame.close();
            //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV(tmp, loop);
            //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV2(tmp, loop, debug);
            Colour = beaconColour.BeaconAnalysisOCVPlayground(debug, tmp, loop, beaconTopLeft, beaconBotRight,
                    beaconMiddle);
            loop++;
            Log.d("OPENCV", "Returned " + Colour);
        }

        for (VuforiaTrackable trackable : allTrackables) {
            /**
             * getUpdatedRobotLocation() will return null if no new information is available since
             * the last time that call was made, or if the trackable is not currently visible.
             * getRobotLocation() will return null if the trackable is not currently visible.
             */
            telemetry.addData(trackable.getName(),
                    ((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible() ? "Visible"
                            : "Not Visible"); //

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) trackable.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;

            }
        }
        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            // Then you can extract the positions and angles using the getTranslation and getOrientation methods.
            VectorF trans = lastLocation.getTranslation();
            Orientation rot = Orientation.getOrientation(lastLocation, AxesReference.EXTRINSIC, AxesOrder.XYZ,
                    AngleUnit.DEGREES);
            // Robot position is defined by the standard Matrix translation (x and y)
            robotX = trans.get(0);
            robotY = trans.get(1);

            // Robot bearing (in Cartesian system) position is defined by the standard Matrix z rotation
            robotBearing = rot.thirdAngle;
            if (robotBearing < 0) {
                robotBearing = 360 + robotBearing;
            }

            telemetry.addData("Pos X ", robotX);
            telemetry.addData("Pos Y ", robotY);
            telemetry.addData("Bear  ", robotBearing);
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos   ", format(lastLocation));

            telemetry.addData("Text ", "*** Vision Data***");
            //telemetry.addData("Red  ", "Red :  " + redpoint);
            //telemetry.addData("Blue ", "Blue:  " + bluepoint);
            //telemetry.addData("Dir  ", "Direction:  " + directionOfBeacon);
        } else {
            telemetry.addData("Pos   ", "Unknown");
        }

        switch (Colour) {
        case BEACON_BLUE_RED:
            telemetry.addData("Beacon ", "Blue Red");
            break;
        case BEACON_RED_BLUE:
            telemetry.addData("Beacon ", "Red Blue");
            break;
        case BEACON_BLUE_LEFT:
            telemetry.addData("Beacon ", "Blue Left");
            break;
        case BEACON_RED_LEFT:
            telemetry.addData("Beacon ", "Red Left");
            break;
        case BEACON_BLUE_RIGHT:
            telemetry.addData("Beacon ", "Blue Right");
            break;
        case BEACON_RED_RIGHT:
            telemetry.addData("Beacon ", "Red Right");
            break;
        case UNKNOWN:
            telemetry.addData("Beacon ", "Unknown");
            break;
        }

        telemetry.update();

    }

    //stop the log
    if (fileLogger != null) {
        fileLogger.writeEvent("stop()", "Stopped");
        fileLogger.close();
        fileLogger = null;
    }

}
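
In the loop above the full-frame Mat (tmp) is handed to the beacon analyser together with the clamped corner points. The sub-matrix constructor from the page's prototype could equally be used to crop the region of interest first; a hedged sketch (beaconRegion is illustrative and reuses the already-clamped beaconTopLeft/beaconBotRight points, it is not part of the original op mode):

Range rowRange = new Range((int) beaconTopLeft.y, (int) beaconBotRight.y);
Range colRange = new Range((int) beaconTopLeft.x, (int) beaconBotRight.x);
Mat beaconRegion = new Mat(tmp, rowRange, colRange); // shares pixel data with tmp, no copy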

From source file:club.towr5291.opmodes.AutoDriveTeam5291.java

License:Open Source License

@Override
public void runOpMode() throws InterruptedException {
    final boolean LedOn = false;
    final boolean LedOff = true;

    //load menu settings and setup robot and debug level
    sharedPreferences = PreferenceManager.getDefaultSharedPreferences(hardwareMap.appContext);
    teamNumber = sharedPreferences.getString("club.towr5291.Autonomous.TeamNumber", "0000");
    allianceColor = sharedPreferences.getString("club.towr5291.Autonomous.Color", "Red");
    allianceStartPosition = sharedPreferences.getString("club.towr5291.Autonomous.StartPosition", "Left");
    allianceParkPosition = sharedPreferences.getString("club.towr5291.Autonomous.ParkPosition", "Vortex");
    delay = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Delay", "0"));
    numBeacons = sharedPreferences.getString("club.towr5291.Autonomous.Beacons", "One");
    robotConfig = sharedPreferences.getString("club.towr5291.Autonomous.RobotConfig", "TileRunner-2x40");
    debug = Integer.parseInt(sharedPreferences.getString("club.towr5291.Autonomous.Debug", "1"));

    telemetry.addData("Init1     ", "Starting!");
    telemetry.update();
    //
    // get a reference to a Modern Robotics DIM, and IO channels.
    dim = hardwareMap.get(DeviceInterfaceModule.class, "dim"); //  Use generic form of device mapping
    dim.setDigitalChannelMode(GREEN1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(RED1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(BLUE1_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(GREEN2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(RED2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    dim.setDigitalChannelMode(BLUE2_LED_CHANNEL, DigitalChannelController.Mode.OUTPUT); // Set the direction of each channel
    LedState(LedOn, LedOn, LedOn, LedOn, LedOn, LedOn);

    //load variables
    LibraryStateSegAuto processingSteps = new LibraryStateSegAuto(0, 0, "", false, false, 0, 0, 0, 0, 0, 0, 0);
    sixValues[] pathValues = new sixValues[1000];
    A0Star a0Star = new A0Star();
    String fieldOutput;
    HashMap<String, LibraryStateSegAuto> autonomousStepsAStar = new HashMap<>();

    if (debug >= 1) {
        fileLogger = new FileLogger(runtime);
        fileLogger.open();
        fileLogger.write("Time,SysMS,Thread,Event,Desc");
        fileLogger.writeEvent(TAG, "Log Started");
        Log.d(TAG, "Log Started");
        runtime.reset();
        telemetry.addData("FileLogger: ", runtime.toString());
        telemetry.addData("FileLogger Op Out File: ", fileLogger.getFilename());
    }

    //load all the vuforia stuff
    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;

    switch (teamNumber) {
    case "5291":
        parameters.vuforiaLicenseKey = "AVATY7T/////AAAAGQJxfNYzLUgGjSx0aOEU0Q0rpcfZO2h2sY1MhUZUr+Bu6RgoUMUP/nERGmD87ybv1/lM2LBFDxcBGRHkXvxtkHel4XEUCsNHFTGWYcVkMIZqctQsIrTe13MnUvSOfQj8ig7xw3iULcwDpY+xAftW61dKTJ0IAOCxx2F0QjJWqRJBxrEUR/DfQi4LyrgnciNMXCiZ8KFyBdC63XMYkQj2joTN579+2u5f8aSCe8jkAFnBLcB1slyaU9lhnlTEMcFjwrLBcWoYIFAZluvFT0LpqZRlS1/XYf45QBSJztFKHIsj1rbCgotAE36novnAQBs74ewnWsJifokJGOYWdFJveWzn3GE9OEH23Y5l7kFDu4wc";
        break;
    case "11230":
        parameters.vuforiaLicenseKey = "Not Provided";
        break;
    case "11231":
        parameters.vuforiaLicenseKey = "Aai2GEX/////AAAAGaIIK9GK/E5ZsiRZ/jrJzdg7wYZCIFQ7uzKqQrMx/0Hh212zumzIy4raGwDY6Mf6jABMShH2etZC/BcjIowIHeAG5ShG5lvZIZEplTO+1zK1nFSiGFTPV59iGVqH8KjLbQdgUbsCBqp4f3tI8BWYqAS27wYIPfTK697SuxdQnpEZAOhHpgz+S2VoShgGr+EElzYMBFEaj6kdA/Lq5OwQp31JPet7NWYph6nN+TNHJAxnQBkthYmQg687WlRZhYrvNJepnoEwsDO3NSyeGlFquwuQwgdoGjzq2qn527I9tvM/XVZt7KR1KyWCn3PIS/LFvADSuyoQ2lsiOFtM9C+KCuNWiqQmj7dPPlpvVeUycoDH";
        break;
    }

    VuforiaLocalizer vuforia = ClassFactory.createVuforiaLocalizer(parameters);
    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true); //enables RGB565 format for the image
    vuforia.setFrameQueueCapacity(1); //tells VuforiaLocalizer to only store one frame at a time
    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);

    VuforiaTrackables velocityVortex = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    VuforiaTrackable wheels = velocityVortex.get(0);
    wheels.setName("wheels"); // wheels target

    VuforiaTrackable tools = velocityVortex.get(1);
    tools.setName("tools"); // tools target

    VuforiaTrackable legos = velocityVortex.get(2);
    legos.setName("legos"); // legos target

    VuforiaTrackable gears = velocityVortex.get(3);
    gears.setName("gears"); // gears target

    /** For convenience, gather together all the trackable objects in one easily-iterable collection */
    List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
    allTrackables.addAll(velocityVortex);

    Vuforia.setFrameFormat(PIXEL_FORMAT.RGB565, true);

    /**
     * We use units of mm here because that's the recommended units of measurement for the
     * size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
     *      <ImageTarget name="stones" size="247 173"/>
     * You don't *have to* use mm here, but the units here and the units used in the XML
     * target configuration files *must* correspond for the math to work out correctly.
     */
    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels

    /**
     * In order for localization to work, we need to tell the system where each target we
     * wish to use for navigation resides on the field, and we need to specify where on the robot
     * the phone resides. These specifications are in the form of <em>transformation matrices.</em>
     * Transformation matrices are a central, important concept in the math here involved in localization.
     * See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
     * for detailed information. Commonly, you'll encounter transformation matrices as instances
     * of the {@link OpenGLMatrix} class.
     *
     * For the most part, you don't need to understand the details of the math of how transformation
     * matrices work inside (as fascinating as that is, truly). Just remember these key points:
     * <ol>
     *
     *     <li>You can put two transformations together to produce a third that combines the effect of
     *     both of them. If, for example, you have a rotation transform R and a translation transform T,
     *     then the combined transformation matrix RT which does the rotation first and then the translation
     *     is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
     *     <em>reverse</em> of the chronological order in which they are applied.</li>
     *
     *     <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
     *     class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
     *     float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
     *     {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
     *     Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
     *     float, float, float, float)}, are syntactic shorthands for creating a new transform and
     *     then immediately multiplying the receiver by it, which can be convenient at times.</li>
     *
     *     <li>If you want to break open the black box of a transformation matrix to understand
     *     what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
     *     transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
     *     AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
     *     will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
     *
     * </ol>
     *
     * This example places the "stones" image on the perimeter wall to the Left
     *  of the Red Driver station wall.  Similar to the Red Beacon Location on the Res-Q
     *
     * This example places the "chips" image on the perimeter wall to the Right
     *  of the Blue Driver station.  Similar to the Blue Beacon Location on the Res-Q
     *
     * See the doc folder of this project for a description of the field Axis conventions.
     *
     * Initially the target is conceptually lying at the origin of the field's coordinate system
     * (the center of the field), facing up.
     *
     * In this configuration, the target's coordinate system aligns with that of the field.
     *
     * In a real situation we'd also account for the vertical (Z) offset of the target,
     * but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
     *
     * To place the Wheels Target on the Red Audience wall:
     * - First we rotate it 90 around the field's X axis to flip it upright
     * - Then we rotate it 90 around the field's Z axis to face it away from the audience.
     * - Finally, we translate it back along the X axis towards the red audience wall.
     *
     */

    // RED Targets
    // To Place GEARS Target
    // position is approximately - (-6feet, -1feet)

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -1 * 12 * mmPerInch, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    gears.setLocation(gearsTargetLocationOnField);

    // To Place TOOLS Target
    // position is approximately - (-6feet, 3feet)
    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X with a positive offset in Y.*/
            .translation(-mmFTCFieldWidth / 2, 3 * 12 * mmPerInch, 0)
            //.translation(0, mmFTCFieldWidth/2, 0)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    tools.setLocation(toolsTargetLocationOnField);

    //Finish RED Targets

    // BLUE Targets
    // To Place LEGOS Target
    // position is approximately - (-3feet, 6feet)

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the BLUE WALL. Our translation here
            is a positive translation in Y.*/
            .translation(-3 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    legos.setLocation(legosTargetLocationOnField);

    // To Place WHEELS Target
    // position is approximately - (1feet, 6feet)
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(1 * 12 * mmPerInch, mmFTCFieldWidth / 2, 0).multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    wheels.setLocation(wheelsTargetLocationOnField);

    //Finish BLUE Targets

    /**
     * Create a transformation matrix describing where the phone is on the robot. Here, we
     * put the phone on the right hand side of the robot with the screen facing in (see our
     * choice of BACK camera above) and in landscape mode. Starting from alignment between the
     * robot's and phone's axes, this is a rotation of -90deg along the Y axis.
     *
     * When determining whether a rotation is positive or negative, consider yourself as looking
     * down the (positive) axis of rotation from the positive towards the origin. Positive rotations
     * are then CCW, and negative rotations CW. An example: consider looking down the positive Z
     * axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
     * plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
     */
    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation((mmBotWidth / 2), 0, 300)
            .multiplied(Orientation.getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES,
                    -90, 0, 0));
    //RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

    /**
     * Let the trackable listeners we care about know where the phone is. We know that each
     * listener is a {@link VuforiaTrackableDefaultListener} and can so safely cast because
     * we have not ourselves installed a listener of a different type.
     */
    ((VuforiaTrackableDefaultListener) gears.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) tools.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) legos.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) wheels.getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /**
     * A brief tutorial: here's how all the math is going to work:
     *
     * C = phoneLocationOnRobot  maps   phone coords -> robot coords
     * P = tracker.getPose()     maps   image target coords -> phone coords
     * L = redTargetLocationOnField maps   image target coords -> field coords
     *
     * So
     *
     * C.inverted()              maps   robot coords -> phone coords
     * P.inverted()              maps   phone coords -> imageTarget coords
     *
     * Putting that all together,
     *
     * L x P.inverted() x C.inverted() maps robot coords to field coords.
     *
     * @see VuforiaTrackableDefaultListener#getRobotLocation()
     */
    telemetry.addData("Init2     ", "Vuforia Options Loaded!");
    telemetry.update();

    //to add more config options edit strings.xml and AutonomousConfiguration.java
    switch (robotConfig) {
    case "TileRunner-2x40": //Velocity Vortex Competition Base
        REVERSE_DIRECTION = 1; // Reverse the direction without significant code changes (setting the motors to FORWARD/REVERSE would affect the driver station since we use the same robot config file)
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 0.7; // This is < 1.0 if geared UP, Tilerunner is geared up
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 16.5; // distance between the centerlines of the rear wheels; the robot pivots on the rear wheels (omni wheels in front); a 16.5 in track is 103.67 inches per full circle
        WHEEL_TURN_FUDGE = 1.0; // Fine tuning amount
        COUNTS_PER_DEGREE = (((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360) * WHEEL_TURN_FUDGE;
        loadPowerTableTileRunner(); //load the power table
        break;
    case "5291 Tank Tread-2x40 Custom": //for tank tread base
        REVERSE_DIRECTION = 1;
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 1.0; // Tank Tread is 1:1 ration
        WHEEL_DIAMETER_INCHES = 3.75; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 18; // distance between the centerlines of the rear wheels; the robot pivots on the rear wheels (omni wheels in front); an 18 in track is about 113.1 inches per full circle
        WHEEL_TURN_FUDGE = 1.12; // Fine tuning amount
        COUNTS_PER_DEGREE = (((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360) * WHEEL_TURN_FUDGE;
        loadPowerTableTankTread(); //load the power table
        break;
    case "11231 2016 Custom": //2016 - 11231 Drivetrain
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = .667; // (.665) raising this increases the distance driven; this is < 1.0 if geared UP (the TileRunner is geared up)
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION)
                / (WHEEL_DIAMETER_INCHES * 3.1415926535)) * WHEEL_ACTUAL_FUDGE;
        ROBOT_TRACK = 18; // distance between the centerlines of the rear wheels; the robot pivots on the rear wheels (omni wheels in front); an 18 in track is about 113.1 inches per full circle
        COUNTS_PER_DEGREE = ((2 * 3.1415926535 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360;
        //loadPowerTableTileRunner();                                                         //load the power table
        break;
    default: //default for competition TileRunner-2x40
        REVERSE_DIRECTION = 1;
        COUNTS_PER_MOTOR_REV = 1120; // eg: TETRIX = 1440 pulses, NeveRest 20 = 560 pulses, NeveRest 40 =  1120, NeveRest 60 = 1680 pulses
        DRIVE_GEAR_REDUCTION = 1.28; // This is < 1.0 if geared UP, Tilerunner is geared up
        WHEEL_DIAMETER_INCHES = 4.0; // For figuring circumference
        WHEEL_ACTUAL_FUDGE = 1; // Fine tuning amount
        COUNTS_PER_INCH = ((COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / (WHEEL_DIAMETER_INCHES * 3.1415))
                * WHEEL_ACTUAL_FUDGE * REVERSE_DIRECTION;
        ROBOT_TRACK = 16.5; // distance between the centerlines of the rear wheels; the robot pivots on the rear wheels (omni wheels in front); a 16.5 in track is 103.67 inches per full circle
        COUNTS_PER_DEGREE = ((2 * 3.1415 * ROBOT_TRACK) * COUNTS_PER_INCH) / 360;
        loadPowerTableTileRunner(); //load the power table
        break;
    }
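
    // Worked example (not from the original source): for the "TileRunner-2x40" case above,
    // COUNTS_PER_INCH  = (1120 * 0.7) / (4.0 * 3.1415) ~= 62.4 encoder counts per inch, and
    // COUNTS_PER_DEGREE = (2 * 3.1415 * 16.5 * 62.4) / 360 ~= 18.0 counts per degree of pivot turn.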

    if (debug >= 1) {
        fileLogger.writeEvent(TAG, "Team #             " + teamNumber);
        fileLogger.writeEvent(TAG, "Alliance Colour    " + allianceColor);
        fileLogger.writeEvent(TAG, "Alliance Start Pos " + allianceStartPosition);
        fileLogger.writeEvent(TAG, "Alliance Park Pos  " + allianceParkPosition);
        fileLogger.writeEvent(TAG, "Alliance Delay     " + delay);
        fileLogger.writeEvent(TAG, "Alliance Beacons   " + numBeacons);
        fileLogger.writeEvent(TAG, "Robot Config       " + robotConfig);
        Log.d(TAG, "Team #             " + teamNumber);
        Log.d(TAG, "Alliance Colour    " + allianceColor);
        Log.d(TAG, "Alliance Start Pos " + allianceStartPosition);
        Log.d(TAG, "Alliance Park Pos  " + allianceParkPosition);
        Log.d(TAG, "Alliance Delay     " + delay);
        Log.d(TAG, "Alliance Beacons   " + numBeacons);
        Log.d(TAG, "Robot Config       " + robotConfig);
    }

    telemetry.addData("Init3       ", "Loading Steps");
    telemetry.update();

    //load the sequence based on alliance colour and team
    switch (teamNumber) {
    case "5291":
        switch (allianceColor) {
        case "Red":
            LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOn);
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("5291Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;

    case "11230":
        switch (allianceColor) {
        case "Red":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11230RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11230RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("5291BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("11230Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;

    case "11231":
        switch (allianceColor) {
        case "Red":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231RedLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231RedRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Blue":
            switch (allianceStartPosition) {
            case "Left":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231BleLeft.csv", allianceParkPosition,
                        numBeacons);
                break;
            case "Right":
                autonomousSteps = autonomousStepsFile.ReadStepFile("11231BlueRight.csv", allianceParkPosition,
                        numBeacons);
                break;
            }
            break;
        case "Test":
            autonomousSteps = autonomousStepsFile.ReadStepFile("11231Test.csv", allianceParkPosition,
                    numBeacons);
            break;
        }
        break;
    }

    //need to load initial step of a delay based on user input
    autonomousStepsFile.insertSteps(delay + 1, "DEL" + (delay * 1000), false, false, 0, 0, 0, 0, 0, 0, 0, 1);

    // Set up the parameters with which we will use our IMU. Note that integration
    // algorithm here just reports accelerations to the logcat log; it doesn't actually
    // provide positional information.
    BNO055IMU.Parameters parametersAdafruitImu = new BNO055IMU.Parameters();
    parametersAdafruitImu.angleUnit = BNO055IMU.AngleUnit.DEGREES;
    parametersAdafruitImu.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
    parametersAdafruitImu.calibrationDataFile = "AdafruitIMUCalibration.json"; // see the calibration sample opmode
    parametersAdafruitImu.loggingEnabled = true;
    parametersAdafruitImu.loggingTag = "IMU";
    parametersAdafruitImu.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();

    //don't crash the program if the GYRO is faulty, just bypass it
    try {
        // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
        // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
        // and named "imu".
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parametersAdafruitImu);

        // get a reference to a Modern Robotics GyroSensor object.
        gyro = (ModernRoboticsI2cGyro) hardwareMap.gyroSensor.get("gyro");
        // calibrate the gyro, this takes a few seconds
        gyro.calibrate();
        telemetry.addData("Init4       ", "Calibrating Gyro");
        telemetry.update();
    } catch (Exception e) {
        if (debug >= 1) {
            fileLogger.writeEvent(TAG, "Gyro Error " + e.getMessage());
            Log.d(TAG, "Gyro Error " + e.getMessage());
        }
        gyroError = true;
    }

    telemetry.addData("Init5       ", "Line Sensors");
    telemetry.update();

    LineSensor1 = hardwareMap.get(AnalogInput.class, "linesensor1");
    LineSensor2 = hardwareMap.get(AnalogInput.class, "linesensor2");
    LineSensor3 = hardwareMap.get(AnalogInput.class, "linesensor3");
    LineSensor4 = hardwareMap.get(AnalogInput.class, "linesensor4");
    LineSensor5 = hardwareMap.get(AnalogInput.class, "linesensor5");
    /*
    * Initialize the drive system variables.
    * The init() method of the hardware class does all the work here
    */

    telemetry.addData("Init6       ", "Base and Arm Motors");
    telemetry.update();

    robotDrive.init(hardwareMap);
    armDrive.init(hardwareMap);

    telemetry.addData("Init7       ", "Range Sensors");
    telemetry.update();

    RANGE1 = hardwareMap.i2cDevice.get("range1");
    RANGE1Reader = new I2cDeviceSynchImpl(RANGE1, RANGE1ADDRESS, false);
    RANGE1Reader.engage();
    RANGE2 = hardwareMap.i2cDevice.get("range2");
    RANGE2Reader = new I2cDeviceSynchImpl(RANGE2, RANGE2ADDRESS, false);
    RANGE2Reader.engage();

    // get a reference to our ColorSensor object.
    try {
        telemetry.addData("Init8       ", "Colour Sensor");
        telemetry.update();
        colorSensor = hardwareMap.colorSensor.get("sensorcolor");
    } catch (Exception e) {
        if (debug >= 1) {
            fileLogger.writeEvent(TAG, "colour Error " + e.getMessage());
            Log.d(TAG, "colour Error " + e.getMessage());
        }
        colourError = true;
    }

    telemetry.addData("Init9       ", "Servos");
    telemetry.update();

    //config the servos
    servoBeaconRight = hardwareMap.servo.get("servobeaconright");
    servoBeaconLeft = hardwareMap.servo.get("servobeaconleft");
    servoBeaconRight.setDirection(Servo.Direction.REVERSE);
    servoLifterRight = hardwareMap.servo.get("servoliftright");
    servoLifterLeft = hardwareMap.servo.get("servoliftleft");
    servoLifterRight.setDirection(Servo.Direction.REVERSE);
    //lock the arms up
    moveServo(servoLifterRight, 135, SERVOLIFTRIGHT_MIN_RANGE, SERVOLIFTRIGHT_MAX_RANGE);
    moveServo(servoLifterLeft, 135, SERVOLIFTLEFT_MIN_RANGE, SERVOLIFTLEFT_MAX_RANGE);

    // Move the beacon pushers to home
    moveServo(servoBeaconRight, SERVOBEACONRIGHT_HOME, SERVOBEACONRIGHT_MIN_RANGE, SERVOBEACONRIGHT_MAX_RANGE);
    moveServo(servoBeaconLeft, SERVOBEACONLEFT_HOME, SERVOBEACONLEFT_MIN_RANGE, SERVOBEACONLEFT_MAX_RANGE);

    // Send telemetry message to signify robot waiting;
    telemetry.addData("Status", "Resetting Encoders"); //

    robotDrive.leftMotor1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.leftMotor2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.rightMotor1.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
    robotDrive.rightMotor2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);

    robotDrive.leftMotor1.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.leftMotor2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.rightMotor1.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
    robotDrive.rightMotor2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

    mintCurStStep = stepState.STATE_INIT;
    mintCurStTankTurn = stepState.STATE_COMPLETE;
    mintCurStDrive = stepState.STATE_COMPLETE;
    mintCurStDriveHeading = stepState.STATE_COMPLETE;
    mintCurStPivotTurn = stepState.STATE_COMPLETE;
    mintCurStRadiusTurn = stepState.STATE_COMPLETE;
    mintCurStDelay = stepState.STATE_COMPLETE;
    mintCurStVuforiaLoc5291 = stepState.STATE_COMPLETE;
    mintCurStVuforiaMove5291 = stepState.STATE_COMPLETE;
    mintCurStVuforiaTurn5291 = stepState.STATE_COMPLETE;
    mintCurStAttackBeacon5291 = stepState.STATE_COMPLETE;
    mintCurStBeaconColour5291 = stepState.STATE_COMPLETE;
    mintCurStShootParticle5291 = stepState.STATE_COMPLETE;
    mintCurStSweeper5291 = stepState.STATE_COMPLETE;
    mintCurStEyelids5291 = stepState.STATE_COMPLETE;
    mintCurStTankTurnGyroHeading = stepState.STATE_COMPLETE;
    mintCurStLineFind5291 = stepState.STATE_COMPLETE;
    mintCurStGyroTurnEncoder5291 = stepState.STATE_COMPLETE;

    mint5291LEDStatus = LEDState.STATE_TEAM;

    mblnNextStepLastPos = false;

    if (!gyroError) {
        while (!isStopRequested() && gyro.isCalibrating()) {
            sleep(50);
            idle();
        }
        telemetry.addData("Init10      ", "Calibrated Gyro");
        telemetry.update();
    }

    if (debug >= 1) {
        fileLogger.writeEvent(TAG, "Init Complete");
        Log.d(TAG, "Init Complete");
    }

    //set up variable for our capturedimage
    Image rgb = null;

    //activate vuforia
    velocityVortex.activate();

    //show options on the driver station phone
    telemetry.addData("Init11     ", "Complete");
    telemetry.addData("Team #     ", teamNumber);
    telemetry.addData("Alliance   ", allianceColor);
    telemetry.addData("Start Pos  ", allianceStartPosition);
    telemetry.addData("Park Pos   ", allianceParkPosition);
    telemetry.addData("Start Del  ", delay);
    telemetry.addData("# Beacons  ", numBeacons);
    telemetry.addData("Robot      ", robotConfig);
    telemetry.update();

    // Wait for the game to start (driver presses PLAY)
    waitForStart();
    if (debug >= 1) {
        fileLogger.writeEvent(TAG, "Value of Gyro Before Reset " + gyro.getIntegratedZValue());
        Log.d(TAG, "Value of Gyro Before Reset " + gyro.getIntegratedZValue());
    }

    imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);
    gyro.resetZAxisIntegrator();

    //the main loop.  this is where the action happens
    while (opModeIsActive()) {
        if (!mblnDisableVisionProcessing) {
            //start capturing frames for analysis
            if (mblnReadyToCapture) {

                boolean gotBeaconDims = false;
                boolean beacFound = false;
                Point beaconBotRight = new Point(0, 0);
                Point beaconTopLeft = new Point(0, 0);
                Point beaconMiddle = new Point(0, 0);

                if (mStateTime.milliseconds() < 1000) {
                    gotBeaconDims = true;
                    beacFound = false;
                }

                if (!gotBeaconDims) {
                    for (VuforiaTrackable beac : velocityVortex) {

                        OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

                        if (pose != null) {

                            Matrix34F rawPose = new Matrix34F();
                            float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                            rawPose.setData(poseData);

                            Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(-TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                            Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(TARGET_WIDTH / 2, TARGET_HEIGHT / 2, 0));
                            Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));
                            Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                                    new Vec3F(-TARGET_WIDTH / 2, -TARGET_HEIGHT / 2, 0));

                            double dblMidPointTopx = (upperRight.getData()[0] + upperLeft.getData()[0]) / 2;
                            double dblMidPointTopy = (upperRight.getData()[1] + upperLeft.getData()[1]) / 2;
                            double dblMidPointBotx = (lowerRight.getData()[0] + lowerLeft.getData()[0]) / 2;
                            double dblMidPointBoty = (lowerRight.getData()[1] + lowerLeft.getData()[1]) / 2;

                            double width = Math.sqrt(
                                    (Math.pow((upperRight.getData()[1] - upperLeft.getData()[1]), 2)) + (Math
                                            .pow((upperRight.getData()[0] - upperLeft.getData()[0]), 2)));
                            double height = Math.sqrt((Math.pow((dblMidPointTopy - dblMidPointBoty), 2))
                                    + (Math.pow((dblMidPointTopx - dblMidPointBotx), 2)));

                            //the target is 254 mm wide; the beacon above it is about 220 mm wide and 150 mm tall
                            //pixels per mm, derived from the known physical size of the target
                            double dblWidthPixelsPermm = width / TARGET_WIDTH;
                            double dblHeightPixelsPermm = height / TARGET_HEIGHT;
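                            //rough example of the arithmetic: if the 254 mm target spans about 320 px in the frame,
                            //dblWidthPixelsPermm ≈ 320 / 254 ≈ 1.26 px/mm, so the ~220 mm wide beacon spans roughly 277 px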

                            //beacon base is about 25mm above top of target
                            beaconBotRight = new Point((dblMidPointTopx + (110 * dblWidthPixelsPermm)),
                                    dblMidPointTopy - (30 * dblHeightPixelsPermm));
                            beaconTopLeft = new Point((dblMidPointTopx - (110 * dblWidthPixelsPermm)),
                                    dblMidPointTopy - (160 * dblHeightPixelsPermm));

                            beaconMiddle.x = dblMidPointTopx;
                            beaconMiddle.y = dblMidPointTopy + (110 * dblHeightPixelsPermm);

                            gotBeaconDims = true;
                            beacFound = true;

                            if (debug >= 1) {
                                fileLogger.writeEvent("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);
                                Log.d("Vuforia", "upperLeft 0 " + upperLeft.getData()[0]);
                                Log.d("Vuforia", "upperLeft 1 " + upperLeft.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "upperRight 1 " + upperRight.getData()[1]);
                                Log.d("Vuforia", "upperRight 0 " + upperRight.getData()[0]);
                                Log.d("Vuforia", "upperRight 1 " + upperRight.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);
                                Log.d("Vuforia", "lowerLeft 0 " + lowerLeft.getData()[0]);
                                Log.d("Vuforia", "lowerLeft 1 " + lowerLeft.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                                fileLogger.writeEvent("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);
                                Log.d("Vuforia", "lowerRight 0 " + lowerRight.getData()[0]);
                                Log.d("Vuforia", "lowerRight 1 " + lowerRight.getData()[1]);

                                fileLogger.writeEvent("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                                fileLogger.writeEvent("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                                fileLogger.writeEvent("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                                fileLogger.writeEvent("Vuforia", "dblMidPointBoty " + dblMidPointBoty);
                                Log.d("Vuforia", "dblMidPointTopx " + dblMidPointTopx);
                                Log.d("Vuforia", "dblMidPointTopy " + dblMidPointTopy);
                                Log.d("Vuforia", "dblMidPointBotx " + dblMidPointBotx);
                                Log.d("Vuforia", "dblMidPointBoty " + dblMidPointBoty);

                                fileLogger.writeEvent("Vuforia", "width in pixels " + width);
                                fileLogger.writeEvent("Vuforia", "height in pixels " + height);
                                Log.d("Vuforia", "width in pixels " + width);
                                Log.d("Vuforia", "height in pixels " + height);
                            }
                        }
                    }
                }

                if (gotBeaconDims) {
                    VuforiaLocalizer.CloseableFrame frame = vuforia.getFrameQueue().take(); //takes the frame at the head of the queue
                    long numImages = frame.getNumImages();

                    for (int i = 0; i < numImages; i++) {
                        if (frame.getImage(i).getFormat() == PIXEL_FORMAT.RGB565) {
                            rgb = frame.getImage(i);
                            break;
                        }
                    }
                    /*rgb is now the Image object that we've used in the video*/
                    Bitmap bm = Bitmap.createBitmap(rgb.getWidth(), rgb.getHeight(), Bitmap.Config.RGB_565);
                    bm.copyPixelsFromBuffer(rgb.getPixels());

                    //put the image into a Mat for OpenCV (Mat is constructed as rows = height, cols = width)
                    Mat tmp = new Mat(rgb.getHeight(), rgb.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(bm, tmp);

                    if (beaconTopLeft.x < 0)
                        beaconTopLeft.x = 0;
                    if (beaconTopLeft.y < 0)
                        beaconTopLeft.y = 0;
                    if (beaconBotRight.x > rgb.getWidth())
                        beaconBotRight.x = rgb.getWidth();
                    if (beaconBotRight.y > rgb.getHeight())
                        beaconBotRight.y = rgb.getHeight();
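                    //a minimal sketch (not part of the original flow): the clamped ROI could be cropped out of tmp
                    //with the Mat(Mat m, Range rowRange, Range colRange) constructor this page documents, e.g.
                    //Mat beaconRoi = new Mat(tmp,
                    //        new org.opencv.core.Range((int) beaconTopLeft.y, (int) beaconBotRight.y),
                    //        new org.opencv.core.Range((int) beaconTopLeft.x, (int) beaconBotRight.x));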

                    //close the frame to prevent memory leaks and crashes
                    frame.close();

                    //analyse the beacons
                    //Constants.BeaconColours Colour = beaconColour.beaconAnalysisOCV(tmp, loop));
                    //mColour = beaconColour.beaconAnalysisOCV(tmp, mintCaptureLoop);
                    mColour = beaconColour.beaconAnalysisOCV2(debug, tmp, mintCaptureLoop, beaconTopLeft,
                            beaconBotRight, beaconMiddle, beacFound);
                    if (debug >= 1) {
                        fileLogger.writeEvent("OPENCV", "Returned " + mColour);
                        Log.d("OPENCV", "Returned " + mColour);
                    }

                    telemetry.addData("Beacon ", mColour);
                    mintCaptureLoop++;
                }
            }

            //use vuforia to get location information
            for (VuforiaTrackable trackable : allTrackables) {
                /**
                 * getUpdatedRobotLocation() will return null if no new information is available since
                 * the last time that call was made, or if the trackable is not currently visible.
                 * getRobotLocation() will return null if the trackable is not currently visible.
                 */
                telemetry.addData(trackable.getName(),
                        ((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible() ? "Visible"
                                : "Not Visible"); //

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) trackable
                        .getListener()).getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }
            /**
             * Provide feedback as to where the robot was last located (if we know).
             */
            if (lastLocation != null) {
                // Then you can extract the positions and angles using the getTranslation and getOrientation methods.
                VectorF trans = lastLocation.getTranslation();
                Orientation rot = Orientation.getOrientation(lastLocation, AxesReference.EXTRINSIC,
                        AxesOrder.XYZ, AngleUnit.DEGREES);
                // Robot position is defined by the standard Matrix translation (x and y)
                localisedRobotX = trans.get(0);
                localisedRobotY = trans.get(1);

                // Robot bearing (in Cartesian system) position is defined by the standard Matrix z rotation
                localisedRobotBearing = rot.thirdAngle;
                if (localisedRobotBearing < 0) {
                    localisedRobotBearing = 360 + localisedRobotBearing;
                }
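                //e.g. a thirdAngle of -30 degrees maps to 330 degrees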

                telemetry.addData("Pos X ", localisedRobotX);
                telemetry.addData("Pos Y ", localisedRobotY);
                telemetry.addData("Bear  ", localisedRobotBearing);
                telemetry.addData("Pos   ", format(lastLocation));
                localiseRobotPos = true;
            } else {
                localiseRobotPos = false;
                telemetry.addData("Pos   ", "Unknown");
            }
        }

        switch (mintCurStStep) {
        case STATE_INIT:

            if (debug >= 1) {
                fileLogger.writeEvent(TAG,
                        "mintCurStStep:- " + mintCurStStep + " mintCurStStep " + mintCurStStep);
                fileLogger.writeEvent(TAG, "About to check if step exists " + mintCurrentStep);
                Log.d(TAG, "mintCurStStep:- " + mintCurStStep + " mintCurStStep " + mintCurStStep);
                Log.d(TAG, "About to check if step exists " + mintCurrentStep);
            }
            // get step from hashmap, send it to the initStep for decoding
            if (autonomousSteps.containsKey(String.valueOf(mintCurrentStep))) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG,
                            "Step Exists TRUE " + mintCurrentStep + " about to get the values from the step");
                    Log.d(TAG,
                            "Step Exists TRUE " + mintCurrentStep + " about to get the values from the step");
                }
                //processingSteps = autonomousSteps.get(String.valueOf(mintCurrentStep));
                //if (debug >= 1)
                //{
                //    fileLogger.writeEvent(TAG, "Got the values for step " + mintCurrentStep + " about to decode");
                //    Log.d(TAG, "Got the values for step " + mintCurrentStep + " about to decode");
                //}
                //decode the step from hashmap
                //initStep(processingSteps);
                initStep();
            } else {
                mintCurStStep = stepState.STATE_FINISHED;
            }
            break;
        case STATE_START:

            break;
        case STATE_RUNNING:

            loadParallelSteps();
            for (String stKey : mintActiveStepsCopy.keySet()) {
                if (debug >= 1) {
                    fileLogger.writeEvent("STATE_RUNNING", "Looping through Parallel steps, found " + stKey);
                    Log.d("STATE_RUNNING", "Looping through Parallel steps, found " + stKey);
                }
                mintStepNumber = mintActiveStepsCopy.get(stKey);
                loadActiveStep(mintStepNumber);
                if (debug >= 1) {
                    fileLogger.writeEvent("STATE_RUNNING", "About to run " + mstrRobotCommand.substring(0, 3));
                    Log.d("STATE_RUNNING", "About to run " + mstrRobotCommand.substring(0, 3));
                }
                processSteps(mstrRobotCommand.substring(0, 3));
            }

            if ((mintCurStDelay == stepState.STATE_COMPLETE)
                    && (mintCurStBeaconColour5291 == stepState.STATE_COMPLETE)
                    && (mintCurStAttackBeacon5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaTurn5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaLoc5291 == stepState.STATE_COMPLETE)
                    && (mintCurStVuforiaMove5291 == stepState.STATE_COMPLETE)
                    && (mintCurStDrive == stepState.STATE_COMPLETE)
                    && (mintCurStDriveHeading == stepState.STATE_COMPLETE)
                    && (mintCurStPivotTurn == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurn == stepState.STATE_COMPLETE)
                    && (mintCurStShootParticle5291 == stepState.STATE_COMPLETE)
                    && (mintCurStSweeper5291 == stepState.STATE_COMPLETE)
                    && (mintCurStEyelids5291 == stepState.STATE_COMPLETE)
                    && (mintCurStLineFind5291 == stepState.STATE_COMPLETE)
                    && (mintCurStGyroTurnEncoder5291 == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurnGyroHeading == stepState.STATE_COMPLETE)
                    && (mintCurStRadiusTurn == stepState.STATE_COMPLETE)) {
                mintCurStStep = stepState.STATE_COMPLETE;
            }

            //reload the current step to determine whether it is parallel; if steps run out of order and a previous
            //step was parallel, a step that isn't parallel could otherwise be treated as if it were
            loadActiveStep(mintCurrentStep);
            if (mblnParallel) {
                //mark this step as complete and move on to the next one; the current step keeps running.
                //Not all steps can safely run in parallel (drive steps, turns, etc.).
                //"Drive forward and shoot" or "drive forward and detect beacon" are examples of when
                //parallel steps should be run; other combinations will cause errors.
                //Only advance to the next step if the current step matches the one being processed for parallelism.
                for (String stKey : mintActiveStepsCopy.keySet()) {
                    mintStepNumber = mintActiveStepsCopy.get(stKey);
                    if (mintCurrentStep == mintStepNumber)
                        mintCurStStep = stepState.STATE_COMPLETE;
                }
            }
            break;
        case STATE_PAUSE:
            break;
        case STATE_COMPLETE:
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "Step Complete - Current Step:- " + mintCurrentStep);
                Log.d(TAG, "Step Complete - Current Step:- " + mintCurrentStep);
            }

            //  Transition to a new state and next step.
            mintCurrentStep++;
            mintCurStStep = stepState.STATE_INIT;
            break;
        case STATE_TIMEOUT:
            setDriveMotorPower(0);
            //  Transition to a new state.
            mintCurStStep = stepState.STATE_FINISHED;
            break;
        case STATE_ERROR:
            telemetry.addData("STATE", "ERROR WAITING TO FINISH " + mintCurrentStep);
            break;
        case STATE_FINISHED:
            setDriveMotorPower(0);

            //deactivate vuforia
            velocityVortex.deactivate();

            telemetry.addData("STATE", "FINISHED " + mintCurrentStep);
            if (debug >= 1) {
                if (fileLogger != null) {
                    fileLogger.writeEvent(TAG, "Step FINISHED - FINISHED");
                    fileLogger.writeEvent(TAG, "Stopped");
                    Log.d(TAG, "FileLogger Stopped");
                    fileLogger.close();
                    fileLogger = null;
                }
            }
            break;
        case STATE_ASTAR_PRE_INIT:
            mintCurrentStepAStar = 1; //init the Step for AStar
            //get start point
            //get end point
            int startX = (int) processingSteps.getmRobotParm1();
            int startY = (int) processingSteps.getmRobotParm2();
            int startZ = (int) processingSteps.getmRobotParm3();
            int endX = (int) processingSteps.getmRobotParm4();
            int endY = (int) processingSteps.getmRobotParm5();
            int endDir = (int) processingSteps.getmRobotParm6();

            //before using the path in the command lets check if we can localise
            if (lastLocation != null) {
                //lets get locations for AStar, direction is most important
                //x and y position for Vuforia are in mm, AStar in Inches
                //counter clockwise rotation (x,y) = (-x, y)
                //origin is center of field
                //A* origin is at the top right, so add 6 feet (72 inches) to each value
                startX = (int) (localisedRobotX / 25.4) + 72;
                startY = (int) (localisedRobotY / 25.4) + 72;
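                //e.g. a localisedRobotX of 914.4 mm is 36 inches, plus 72 gives a grid x of 108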
                //need to rotate the axis -90 degrees
                startZ = (int) localisedRobotBearing;

                if ((startZ > 357) || (startZ < 3))
                    startZ = 90;
                else if ((startZ > 267) && (startZ < 273))
                    startZ = 0;
                else if ((startZ > 177) && (startZ < 183))
                    startZ = 270;
                else if ((startZ > 87) && (startZ < 93))
                    startZ = 180;

                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "AStar Init - Localised Values");
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotX:        " + localisedRobotX);
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotY:        " + localisedRobotY);
                    fileLogger.writeEvent(TAG, "AStar Init - localisedRobotBearing:  " + localisedRobotBearing);
                    fileLogger.writeEvent(TAG, "AStar Init - startX:                 " + startX);
                    fileLogger.writeEvent(TAG, "AStar Init - startY:                 " + startY);
                    fileLogger.writeEvent(TAG, "AStar Init - startZ:                 " + startZ);
                    Log.d(TAG, "AStar Init - Localised Values");
                    Log.d(TAG, "AStar Init - localisedRobotX:        " + localisedRobotX);
                    Log.d(TAG, "AStar Init - localisedRobotY:        " + localisedRobotY);
                    Log.d(TAG, "AStar Init - localisedRobotBearing:  " + localisedRobotBearing);
                    Log.d(TAG, "AStar Init - startX:                 " + startX);
                    Log.d(TAG, "AStar Init - startY:                 " + startY);
                    Log.d(TAG, "AStar Init - startZ:                 " + startZ);
                }
            }

            //process path
            pathValues = getPathValues.findPathAStar(startX, startY, startZ, endX, endY, endDir); //for enhanced
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "AStar Path - length:                 " + pathValues.length);
                Log.d(TAG, "AStar Path - length:                 " + pathValues.length);
            }

            String[][] mapComplete = new String[A0Star.FIELDWIDTH][A0Star.FIELDWIDTH];

            //write path to logfile to verify path
            for (int y = 0; y < a0Star.fieldLength; y++) {
                for (int x = 0; x < a0Star.fieldWidth; x++) {
                    switch (allianceColor) {
                    case "Red":
                        if (a0Star.walkableRed[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        } else {
                            mapComplete[y][x] = "0";
                        }
                        break;

                    case "Blue":
                        if (a0Star.walkableBlue[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        } else {
                            if ((x == startX) && (y == startY)) {
                                mapComplete[y][x] = "1";
                            } else {
                                mapComplete[y][x] = "0";
                            }
                        }
                        break;

                    default:
                        if (a0Star.walkable[y][x]) {
                            mapComplete[y][x] = "1";
                            if ((x == startX) && (y == startY))
                                mapComplete[y][x] = "S";
                            else if ((x == endX) && (y == endY))
                                mapComplete[y][x] = "E";
                        }
                        break;
                    }
                }
            }

            //plot out path..
            for (int i = 0; i < pathValues.length; i++) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                            + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                    Log.d(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                            + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                }
                if (((int) pathValues[i].val1 == 0) && ((int) pathValues[i].val3 == 0)
                        && ((int) pathValues[i].val2 == 0) && ((int) pathValues[i].val4 == 0))
                    break;
                mapComplete[(int) pathValues[i].val3][(int) pathValues[i].val2] = "P";
                if ((pathValues[i].val2 == startX) && (pathValues[i].val3 == startY)) {
                    mapComplete[(int) pathValues[i].val3][(int) pathValues[i].val2] = "S";
                }
            }
            mapComplete[endY][endX] = "E";
            fieldOutput = "";

            for (int y = 0; y < a0Star.fieldLength; y++) {
                for (int x = 0; x < a0Star.fieldWidth; x++) {
                    fieldOutput = "" + fieldOutput + mapComplete[y][x];
                }
                if (debug >= 2) {
                    fileLogger.writeEvent(TAG, fieldOutput);
                    Log.d(TAG, fieldOutput);
                }
                fieldOutput = "";
            }

            //load path in Hashmap
            boolean dirChanged;
            boolean processingAStarSteps = true;
            int startSegment = 1;
            int numberOfMoves;
            int key = 0;
            int lastDirection = 0;
            int lasti = 0;
            String strAngleChange = "RT00";

            while (processingAStarSteps) {
                numberOfMoves = 0;
                for (int i = startSegment; i < pathValues.length; i++) {
                    numberOfMoves++;
                    if (debug >= 2) {
                        fileLogger.writeEvent(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                        Log.d(TAG, "Path " + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                    }
                    if (((int) pathValues[i].val1 == 0) && ((int) pathValues[i].val2 == 0)
                            && ((int) pathValues[i].val3 == 0)) {
                        if (debug >= 2) {
                            fileLogger.writeEvent(TAG, "End Detected");
                            Log.d(TAG, "End Detected");
                        }
                        //end of the sequence,
                        lastDirection = (int) pathValues[i - 1].val4;
                        processingAStarSteps = false;
                        lasti = i;
                    }
                    //need to check if the first step is in a different direction than the start
                    if (i == 1) {
                        if (startZ != pathValues[i].val4) { //need to turn
                            strAngleChange = getAngle(startZ, (int) pathValues[i].val4);
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG,
                                        "First Step Need to turn Robot " + strAngleChange + " Path "
                                                + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                                fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                        + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                                Log.d(TAG,
                                        "First Step Need to turn Robot " + strAngleChange + " Path "
                                                + pathValues[i].val1 + " " + pathValues[i].val2 + " "
                                                + pathValues[i].val3 + " Dir:= " + pathValues[i].val4);
                                Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                        + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                            }
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 1));
                            key++;
                            dirChanged = true;
                        } else {
                            dirChanged = false; //no change in direction
                        }
                    } else {
                        //work out the sequence not the first step
                        if (pathValues[i - 1].val4 != pathValues[i].val4) { //need to turn
                            strAngleChange = getAngle((int) pathValues[i - 1].val4, (int) pathValues[i].val4);
                            dirChanged = true;
                        } else {
                            dirChanged = false; //no change in direction
                        }
                    }
                    if ((dirChanged) || (!processingAStarSteps)) {
                        //found end of segment
                        int AStarPathAngle;
                        if (i == 1) {
                            AStarPathAngle = startZ;
                        } else {
                            AStarPathAngle = (int) pathValues[i - 1].val4;
                        }
                        switch (AStarPathAngle) {
                        case 0:
                        case 90:
                        case 180:
                        case 270:
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG,
                                        "Heading on a Straight line " + (numberOfMoves) + " Path");
                                fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + "FW"
                                        + (numberOfMoves) + ", false, false, 0, 0, 0, 0, 0, 0, 0.8, false) ");
                                Log.d(TAG, "Heading on a Straight line " + (numberOfMoves) + " Path");
                                Log.d(TAG, "Adding Command (" + key + ", 10, " + "FW" + (numberOfMoves)
                                        + ", false, false, 0, 0, 0, 0, 0, 0, 0.8, false) ");
                            }
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    "FW" + numberOfMoves, false, false, 0, 0, 0, 0, 0, 0, 0.8));
                            numberOfMoves = 0;
                            key++;
                            break;
                        case 45:
                        case 135:
                        case 225:
                        case 315:
                            if (debug >= 2) {
                                fileLogger.writeEvent(TAG, "Heading on a Straight line "
                                        + (int) ((numberOfMoves) * 1.4142) + " Path");
                                fileLogger.writeEvent(TAG,
                                        "Adding Command (" + key + ", 10, " + "FW"
                                                + (int) ((numberOfMoves) * 1.4142)
                                                + ", false, false, 0, 0, 0, 0, 0, 0, .8, false) ");
                                Log.d(TAG, "Heading on a Straight line " + (int) ((numberOfMoves) * 1.4142)
                                        + " Path");
                                Log.d(TAG,
                                        "Adding Command (" + key + ", 10, " + "FW"
                                                + (int) ((numberOfMoves) * 1.4142)
                                                + ", false, false, 0, 0, 0, 0, 0, 0, .8, false) ");
                            }
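                            //1.4142 ≈ sqrt(2): each diagonal grid cell covers about 1.41 straight-line units,
                            //so e.g. 10 diagonal moves becomes an "FW14" command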
                            autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                    "FW" + (int) (numberOfMoves * 1.4142), false, false, 0, 0, 0, 0, 0, 0, 1));
                            numberOfMoves = 0;
                            key++;
                            break;
                        }
                        if (debug >= 2) {
                            fileLogger.writeEvent(TAG,
                                    "Need to turn Robot " + strAngleChange + " Path " + pathValues[i].val1 + " "
                                            + pathValues[i].val2 + " " + pathValues[i].val3 + " Dir:= "
                                            + pathValues[i].val4);
                            fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                    + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                            Log.d(TAG,
                                    "Need to turn Robot " + strAngleChange + " Path " + pathValues[i].val1 + " "
                                            + pathValues[i].val2 + " " + pathValues[i].val3 + " Dir:= "
                                            + pathValues[i].val4);
                            Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                                    + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                        }
                        autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                                strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 0.4));
                        key++;
                    }
                    if (!processingAStarSteps)
                        break;

                }
                //need to work out the direction we are facing and the required direction
                if ((lastDirection != endDir) && (!processingAStarSteps)) {
                    if (debug >= 2) {
                        fileLogger.writeEvent(TAG, "Sraight Moves Robot End Of Sequence - Need to Trun Robot");
                        Log.d(TAG, "Sraight Moves Robot End Of Sequence - Need to Trun Robot");
                    }
                    strAngleChange = getAngle((int) pathValues[lasti - 1].val4, endDir);
                    fileLogger.writeEvent(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                            + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                    Log.d(TAG, "Adding Command (" + key + ", 10, " + strAngleChange
                            + ", 0, 0, 0, 0, 0, 0, 1, false) ");
                    autonomousStepsAStar.put(String.valueOf(key), new LibraryStateSegAuto(key, 10,
                            strAngleChange, false, false, 0, 0, 0, 0, 0, 0, 0.4));
                    key++;
                }
            }
            mintCurStStep = stepState.STATE_ASTAR_INIT;

            break;
        case STATE_ASTAR_INIT: {
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "About to check if step exists " + mintCurrentStepAStar);
                Log.d(TAG, "About to check if step exists " + mintCurrentStepAStar);
            }
            // get step from hashmap, send it to the initStep for decoding
            if (autonomousStepsAStar.containsKey(String.valueOf(mintCurrentStepAStar))) {
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Step Exists TRUE " + mintCurrentStepAStar
                            + " about to get the values from the step");
                    Log.d(TAG, "Step Exists TRUE " + mintCurrentStepAStar
                            + " about to get the values from the step");
                }
                processingSteps = autonomousStepsAStar.get(String.valueOf(mintCurrentStepAStar)); //read the step from the hashmap
                autonomousStepsAStar.remove(String.valueOf(mintCurrentStepAStar)); //remove the step from the hashmap
                if (debug >= 1) {
                    fileLogger.writeEvent(TAG, "Got the values for step " + mintCurrentStepAStar
                            + " about to decode and removed them");
                    Log.d(TAG, "Got the values for step " + mintCurrentStepAStar
                            + " about to decode and removed them");
                }
                //decode the step from hashmap
                initAStarStep(processingSteps);
            } else {
                //if no steps left in hashmap then complete
                mintCurStStep = stepState.STATE_ASTAR_COMPLETE;
            }
        }
            break;
        case STATE_ASTAR_RUNNING: {
            //move robot according AStar hashmap
            TankTurnStep();
            PivotTurnStep();
            RadiusTurnStep();
            DriveStepHeading();
            if ((mintCurStDriveHeading == stepState.STATE_COMPLETE)
                    && (mintCurStPivotTurn == stepState.STATE_COMPLETE)
                    && (mintCurStTankTurn == stepState.STATE_COMPLETE)
                    && (mintCurStRadiusTurn == stepState.STATE_COMPLETE)) {
                //increment ASTar Steps Counter
                mintCurrentStepAStar++;
                mintCurStStep = stepState.STATE_ASTAR_INIT;
            }

        }
            break;
        case STATE_ASTAR_ERROR: {
            //do something on error
        }
            break;
        case STATE_ASTAR_COMPLETE: {
            //empty hashmap ready for next AStar processing.
            //clear AStar step counter ready for next AStar process
            mintCurrentStepAStar = 0;

            //when complete, keep processing normal step
            if (debug >= 1) {
                fileLogger.writeEvent(TAG, "A* Path Completed:- " + mintCurrentStep);
                Log.d(TAG, "A* Path Completed:- " + mintCurrentStep);
            }

            //  Transition to a new state and next step.
            mintCurrentStep++;
            mintCurStStep = stepState.STATE_INIT;
        }
            break;
        }

        //process LED status
        //ERROR - FLASH RED 3 TIMES
        switch (mint5291LEDStatus) {
        case STATE_TEAM: //FLASH Alliance Colour
            switch (allianceColor) {
            case "Red":
                LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
                break;
            case "Blue":
                LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOn);
                break;
            default:
                LedState(LedOn, LedOn, LedOn, LedOn, LedOn, LedOn);
                break;
            }
            break;
        case STATE_ERROR: //Flash RED 3 times Rapidly
            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 250))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                LedState(LedOff, LedOn, LedOff, LedOff, LedOn, LedOff);
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 750))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOff);
            }
            break;
        case STATE_SUCCESS: //Flash GREEN 3 times Rapidly
            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 250))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                LedState(LedOn, LedOff, LedOff, LedOn, LedOff, LedOff);
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 250))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOff);
                mintCounts++;
            }
            if (mintCounts >= 5) {
                mintCounts = 0;
                mint5291LEDStatus = LEDState.STATE_TEAM;
            }
            break;
        case STATE_BEACON: //

            if ((!mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOff + 500))) {
                mdblLastOn = mStateTime.milliseconds();
                mblnLEDON = true;
                if (mColour == Constants.BeaconColours.BEACON_BLUE_RED) { //means red is to the right
                    LedState(LedOff, LedOff, LedOn, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED_BLUE) {
                    LedState(LedOff, LedOn, LedOff, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_BLUE) {
                    LedState(LedOn, LedOff, LedOff, LedOff, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED) { //BEACON_RED is assumed to exist in Constants.BeaconColours
                    LedState(LedOff, LedOn, LedOff, LedOff, LedOff, LedOff);
                }
            } else if ((mblnLEDON) && (mStateTime.milliseconds() > (mdblLastOn + 500))) {
                mdblLastOff = mStateTime.milliseconds();
                mblnLEDON = false;
                if (mColour == Constants.BeaconColours.BEACON_BLUE_RED) { //means red is to the right
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOn, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED_BLUE) {
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOff, LedOn);
                } else if (mColour == Constants.BeaconColours.BEACON_BLUE) {
                    LedState(LedOff, LedOff, LedOff, LedOn, LedOff, LedOff);
                } else if (mColour == Constants.BeaconColours.BEACON_RED) { //BEACON_RED is assumed to exist in Constants.BeaconColours
                    LedState(LedOff, LedOff, LedOff, LedOff, LedOn, LedOff);
                }
                mintCounts++;
            }
            if (mintCounts >= 10) {
                mintCounts = 0;
                mint5291LEDStatus = LEDState.STATE_TEAM;
            }
            break;
        case STATE_FINISHED: //Solid Green
            LedState(LedOn, LedOff, LedOff, LedOn, LedOff, LedOff);
            break;

        }
        telemetry.update();
    }
    //opmode not active anymore
}

From source file:cn.xiongyihui.webcam.JpegFactory.java

License:Open Source License

public void onPreviewFrame(byte[] data, Camera camera) {
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, mWidth, mHeight, null);

    mJpegOutputStream.reset();

    try {
        //Log.e(TAG, "Beginning to read values!");
        double distanceTemplateFeatures = this.globalClass.getDistanceTemplateFeatures();
        double xTemplateCentroid = this.globalClass.getXtemplateCentroid();
        double yTemplateCentroid = this.globalClass.getYtemplateCentroid();
        int x0template = this.globalClass.getX0display();
        int y0template = this.globalClass.getY0display();
        int x1template = this.globalClass.getX1display();
        int y1template = this.globalClass.getY1display();
        Mat templateDescriptor = this.globalClass.getTemplateDescriptor();
        MatOfKeyPoint templateKeyPoints = this.globalClass.getKeyPoints();
        KeyPoint[] templateKeyPointsArray = templateKeyPoints.toArray();
        int numberOfTemplateFeatures = this.globalClass.getNumberOfTemplateFeatures();
        int numberOfPositiveTemplateFeatures = this.globalClass.getNumberOfPositiveTemplateFeatures();
        KeyPoint[] normalisedTemplateKeyPoints = this.globalClass.getNormalisedTemplateKeyPoints();
        double normalisedXcentroid = this.globalClass.getNormalisedXcentroid();
        double normalisedYcentroid = this.globalClass.getNormalisedYcentroid();
        int templateCapturedBitmapWidth = this.globalClass.getTemplateCapturedBitmapWidth();
        int templateCapturedBitmapHeight = this.globalClass.getTemplateCapturedBitmapHeight();
        //Log.e(TAG, "Ended reading values!");
        globalClass.setJpegFactoryDimensions(mWidth, mHeight);
        double scalingRatio, scalingRatioHeight, scalingRatioWidth;

        scalingRatioHeight = (double) mHeight / (double) templateCapturedBitmapHeight;
        scalingRatioWidth = (double) mWidth / (double) templateCapturedBitmapWidth;
        scalingRatio = (scalingRatioHeight + scalingRatioWidth) / 2; //Just to account for any minor variations.
        //Log.e(TAG, "Scaling ratio:" + String.valueOf(scalingRatio));
        //Log.e("Test", "Captured Bitmap's dimensions: (" + templateCapturedBitmapHeight + "," + templateCapturedBitmapWidth + ")");

        //Scale the actual features of the image
        int flag = this.globalClass.getFlag();
        if (flag == 0) {
            int iterate = 0;
            int iterationMax = numberOfTemplateFeatures;

            for (iterate = 0; iterate < (iterationMax); iterate++) {
                Log.e(TAG, "Point detected " + iterate + ":(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");

                if (flag == 0) {
                    templateKeyPointsArray[iterate].pt.x = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.x + (double) x0template);
                    templateKeyPointsArray[iterate].pt.y = scalingRatio
                            * (templateKeyPointsArray[iterate].pt.y + (double) y0template);
                }
                Log.e(TAG, "Scaled points:(" + templateKeyPointsArray[iterate].pt.x + ","
                        + templateKeyPointsArray[iterate].pt.y + ")");
            }

            this.globalClass.setFlag(1);
        }

        templateKeyPoints.fromArray(templateKeyPointsArray);
        //Log.e(TAG, "Template-features have been scaled successfully!");

        long timeBegin = System.currentTimeMillis();
        Mat mYuv = new Mat(mHeight + mHeight / 2, mWidth, CvType.CV_8UC1);
        mYuv.put(0, 0, data);
        Mat mRgb = new Mat();
        Imgproc.cvtColor(mYuv, mRgb, Imgproc.COLOR_YUV420sp2RGB);
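        //NV21 stores the full-resolution Y plane followed by interleaved VU at half resolution,
        //hence the height * 3/2 rows in the single-channel mYuv Mat above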

        Mat result = new Mat();
        Imgproc.cvtColor(mRgb, result, Imgproc.COLOR_RGB2GRAY);
        int detectorType = FeatureDetector.ORB;
        FeatureDetector featureDetector = FeatureDetector.create(detectorType);
        MatOfKeyPoint keypointsImage = new MatOfKeyPoint();
        featureDetector.detect(result, keypointsImage);
        KeyPoint[] imageKeypoints = keypointsImage.toArray();

        Scalar color = new Scalar(0, 0, 0);

        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        Mat imageDescriptor = new Mat();
        descriptorExtractor.compute(result, keypointsImage, imageDescriptor);

        //BRUTEFORCE_HAMMING matches even dubious feature points, so separating inliers from outliers can become a problem

        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(imageDescriptor, templateDescriptor, matches);

        //Log.e("Prasad", String.valueOf(mWidth) + "," + String.valueOf(mHeight));

        DMatch[] matchesArray = matches.toArray();

        double minimumMatchDistance = globalClass.getHammingDistance();

        int iDescriptorMax = matchesArray.length;
        int iterateDescriptor;

        double xMatchedPoint, yMatchedPoint;
        int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;

        Point point;

        double rHigh = this.globalClass.getRhigh();
        double rLow = this.globalClass.getRlow();
        double gHigh = this.globalClass.getGhigh();
        double gLow = this.globalClass.getGlow();
        double bHigh = this.globalClass.getBhigh();
        double bLow = this.globalClass.getBlow();

        double[] colorValue;
        double red, green, blue;
        int[] featureCount;
        double xKernelSize = 9, yKernelSize = 9;
        globalClass.setKernelSize(xKernelSize, yKernelSize);
        double xImageKernelScaling, yImageKernelScaling;

        xImageKernelScaling = xKernelSize / mWidth;
        yImageKernelScaling = yKernelSize / mHeight;
        int[][] kernel = new int[(int) xKernelSize][(int) yKernelSize];
        double[][] kernelCounter = new double[(int) xKernelSize][(int) yKernelSize];
        int numberKernelMax = 10;
        globalClass.setNumberKernelMax(numberKernelMax);
        int[][][] kernelArray = new int[(int) xKernelSize][(int) yKernelSize][numberKernelMax];
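        //the frame is divided into a coarse xKernelSize x yKernelSize grid; kernelCounter tallies
        //colour-matched features per cell so a persistence mask can be built up over several frames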
        double featureImageResponse;
        double xImageCentroid, yImageCentroid;
        double xSum = 0, ySum = 0;
        double totalImageResponse = 0;

        for (iterateDescriptor = 0; iterateDescriptor < iDescriptorMax; iterateDescriptor++) {
            if (matchesArray[iterateDescriptor].distance < minimumMatchDistance) {
                //MatchedPoint: Awesome match without color feedback
                xMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.x;
                yMatchedPoint = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt.y;

                colorValue = mRgb.get((int) yMatchedPoint, (int) xMatchedPoint);

                red = colorValue[0];
                green = colorValue[1];
                blue = colorValue[2];

                int xKernelFeature, yKernelFeature;
                //Color feedback
                if ((rLow < red) && (red < rHigh) && (gLow < green) && (green < gHigh) && (bLow < blue)
                        && (blue < bHigh)) {
                    try {
                        featureImageResponse = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].response;
                        if (featureImageResponse > 0) {
                            xSum = xSum + featureImageResponse * xMatchedPoint;
                            ySum = ySum + featureImageResponse * yMatchedPoint;
                            totalImageResponse = totalImageResponse + featureImageResponse;
                            point = imageKeypoints[matchesArray[iterateDescriptor].queryIdx].pt;

                            xKernelFeature = (int) (xMatchedPoint * xImageKernelScaling);
                            yKernelFeature = (int) (yMatchedPoint * yImageKernelScaling);
                            kernelCounter[xKernelFeature][yKernelFeature]++;
                            //Core.circle(result, point, 3, color);
                        }
                    } catch (Exception e) {
                    }
                }
                //Log.e(TAG, iterateDescriptor + ": (" + xMatchedPoint + "," + yMatchedPoint + ")");
            }
        }

        int iKernel = 0, jKernel = 0;
        for (iKernel = 0; iKernel < xKernelSize; iKernel++) {
            for (jKernel = 0; jKernel < yKernelSize; jKernel++) {
                if (kernelCounter[iKernel][jKernel] > 0) {
                    kernel[iKernel][jKernel] = 1;
                } else {
                    kernel[iKernel][jKernel] = 0;
                }
            }
        }

        xImageCentroid = xSum / totalImageResponse;
        yImageCentroid = ySum / totalImageResponse;
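        //centroid of the colour-matched features, weighted by each keypoint's detector response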

        if (Double.isNaN(xImageCentroid) || Double.isNaN(yImageCentroid)) {
            //Log.e(TAG, "Centroid is not getting detected! Increasing hamming distance (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance + 2));
        } else {
            //Log.e(TAG, "Centroid is getting detected! Decreasing and optimising hamming (error-tolerance)!");
            globalClass.setHammingDistance((int) (minimumMatchDistance - 1));
            int jpegCount = globalClass.getJpegFactoryCallCount();
            jpegCount++;
            globalClass.setJpegFactoryCallCount(jpegCount);
            int initialisationFlag = globalClass.getInitialisationFlag();
            int numberOfDistances = 10;
            globalClass.setNumberOfDistances(numberOfDistances);

            if ((jpegCount > globalClass.getNumberKernelMax()) & (jpegCount > numberOfDistances)) {
                globalClass.setInitialisationFlag(1);
            }

            int[][] kernelSum = new int[(int) xKernelSize][(int) yKernelSize],
                    mask = new int[(int) xKernelSize][(int) yKernelSize];
            int iJpeg, jJpeg;
            kernelSum = globalClass.computeKernelSum(kernel);

            Log.e(TAG, Arrays.deepToString(kernelSum));

            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (kernelSum[iJpeg][jJpeg] > (numberKernelMax / 4)) {//Meant for normalised kernel
                        mask[iJpeg][jJpeg]++;
                    }
                }
            }

            Log.e(TAG, Arrays.deepToString(mask));

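            //note: maskedFeatureCount starts at 1, presumably to avoid division by zero below (this slightly biases the mask mean)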
            int maskedFeatureCount = 1, xMaskFeatureSum = 0, yMaskFeatureSum = 0;

            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xMaskFeatureSum = xMaskFeatureSum + iJpeg;
                        yMaskFeatureSum = yMaskFeatureSum + jJpeg;
                        maskedFeatureCount++;
                    }
                }
            }

            double xMaskMean = (double) xMaskFeatureSum / maskedFeatureCount; //cast avoids integer division
            double yMaskMean = (double) yMaskFeatureSum / maskedFeatureCount;

            double xSquaredSum = 0, ySquaredSum = 0;
            for (iJpeg = 0; iJpeg < xKernelSize; iJpeg++) {
                for (jJpeg = 0; jJpeg < yKernelSize; jJpeg++) {
                    if (mask[iJpeg][jJpeg] == 1) {
                        xSquaredSum = xSquaredSum + (iJpeg - xMaskMean) * (iJpeg - xMaskMean);
                        ySquaredSum = ySquaredSum + (jJpeg - yMaskMean) * (jJpeg - yMaskMean);
                    }
                }
            }

            double xRMSscaled = Math.sqrt(xSquaredSum);
            double yRMSscaled = Math.sqrt(ySquaredSum);
            double RMSimage = ((xRMSscaled / xImageKernelScaling) + (yRMSscaled / yImageKernelScaling)) / 2;
            Log.e(TAG, "RMS radius of the image: " + RMSimage);

            /*//Command the quadcopter and send PWM values to Arduino
            double throttlePWM = 1500, yawPWM = 1500, pitchPWM = 1500;
            double deltaThrottle = 1, deltaYaw = 1, deltaPitch = 1;
                    
            throttlePWM = globalClass.getThrottlePWM();
            pitchPWM = globalClass.getPitchPWM();
            yawPWM = globalClass.getYawPWM();
                    
            deltaThrottle = globalClass.getThrottleDelta();
            deltaPitch = globalClass.getPitchDelta();
            deltaYaw = globalClass.getYawDelta();
                    
            if(yImageCentroid>yTemplateCentroid) {
            throttlePWM = throttlePWM + deltaThrottle;
            }else{
            throttlePWM = throttlePWM - deltaThrottle;
            }
                    
            if(RMSimage>distanceTemplateFeatures) {
            pitchPWM = pitchPWM + deltaPitch;
            }else{
            pitchPWM = pitchPWM - deltaPitch;
            }
                    
            if(xImageCentroid>xTemplateCentroid) {
            yawPWM = yawPWM + deltaYaw;
            }else{
            yawPWM = yawPWM - deltaYaw;
            }
                    
            if(1000>throttlePWM){   throttlePWM = 1000; }
                    
            if(2000<throttlePWM){   throttlePWM = 2000; }
                    
            if(1000>pitchPWM){  pitchPWM = 1000;    }
                    
            if(2000<pitchPWM){  pitchPWM = 2000;    }
                    
            if(1000>yawPWM){    yawPWM = 1000;  }
                    
            if(2000<yawPWM){    yawPWM = 2000;  }
                    
            globalClass.setPitchPWM(pitchPWM);
            globalClass.setYawPWM(yawPWM);
            globalClass.setThrottlePWM(throttlePWM);*/

            //Display bounding circle
            int originalWidthBox = x1template - x0template;
            int originalHeightBox = y1template - y0template;

            double scaledBoundingWidth = (originalWidthBox * RMSimage / distanceTemplateFeatures);
            double scaledBoundingHeight = (originalHeightBox * RMSimage / distanceTemplateFeatures);

            double displayRadius = (scaledBoundingWidth + scaledBoundingHeight) / 2;
            displayRadius = displayRadius * 1.4826;
            displayRadius = displayRadius / numberKernelMax;
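            //1.4826 is the usual MAD-to-standard-deviation scaling constant, and dividing by numberKernelMax
            //appears to average over the kernel history (interpretation; the original gives no explanation)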
            double distanceAverage = 0;
            if (Double.isNaN(displayRadius)) {
                //Log.e(TAG, "displayRadius is NaN!");
            } else {
                distanceAverage = globalClass.imageDistanceAverage(displayRadius);
                //Log.e(TAG, "Average distance: " + distanceAverage);
            }

            if (Double.isNaN(xImageCentroid) || Double.isNaN(yImageCentroid)) {
                //Log.e(TAG, "Centroid is NaN!");
            } else {
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
            }

            if (initialisationFlag == 1) {
                //int displayRadius = 50;

                Point pointDisplay = new Point();
                //pointDisplay.x = xImageCentroid;
                //pointDisplay.y = yImageCentroid;
                pointDisplay.x = globalClass.getXcentroidAverageGlobal();
                pointDisplay.y = globalClass.getYcentroidAverageGlobal();
                globalClass.centroidAverage(xImageCentroid, yImageCentroid);
                int distanceAverageInt = (int) distanceAverage;
                Core.circle(result, pointDisplay, distanceAverageInt, color);
            }

        }

        Log.e(TAG, "Centroid in the streamed image: (" + xImageCentroid + "," + yImageCentroid + ")");
        /*try {
        //Features2d.drawKeypoints(result, keypointsImage, result, color, flagDraw);
        Features2d.drawKeypoints(result, templateKeyPoints, result, color, flagDraw);
        }catch(Exception e){}*/

        //Log.e(TAG, "High (R,G,B): (" + rHigh + "," + gHigh + "," + bHigh + ")");
        //Log.e(TAG, "Low (R,G,B): (" + rLow + "," + gLow + "," + bLow + ")");

        //Log.e(TAG, Arrays.toString(matchesArray));

        try {
            Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(result, bmp);
            //Utils.matToBitmap(mRgb, bmp);
            bmp.compress(Bitmap.CompressFormat.JPEG, mQuality, mJpegOutputStream);
        } catch (Exception e) {
            Log.e(TAG, "JPEG not working!");
        }

        long timeEnd = (int) System.currentTimeMillis();
        Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + "milli-seconds!");

        mJpegData = mJpegOutputStream.toByteArray();

        synchronized (mJpegOutputStream) {
            mJpegOutputStream.notifyAll();
        }
    } catch (Exception e) {
        Log.e(TAG, "JPEG-factory is not working!");
    }

}
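As a minimal sketch of the Mat-to-JPEG step performed above, the helper below (its name and the quality parameter are illustrative, not part of the original class) renders an OpenCV Mat into a Bitmap and JPEG-encodes it into a byte array suitable for streaming.

// Assumes org.opencv.android.Utils, org.opencv.core.Mat, android.graphics.Bitmap
// and java.io.ByteArrayOutputStream are imported.
private static byte[] matToJpegBytes(Mat result, int quality) {
    Bitmap bmp = Bitmap.createBitmap(result.cols(), result.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(result, bmp);                             // copy the Mat pixels into the Bitmap
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bmp.compress(Bitmap.CompressFormat.JPEG, quality, stream);  // JPEG-encode at the requested quality
    return stream.toByteArray();
}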

From source file:cn.xiongyihui.webcam.setup.java

License:Open Source License

@Override
public boolean onTouchEvent(MotionEvent event) {

    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);

            int X = (int) event.getX();
            int Y = (int) event.getY();

            int[] coordinates = new int[2];//{0,0};
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            int viewLeft = coordinates[0];
            int viewRight = coordinates[0] + imageView.getWidth();

            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;

            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;

            Display display = getWindowManager().getDefaultDisplay();
            android.graphics.Point size = new android.graphics.Point();
            display.getSize(size);
            int widthScreen = (int) size.x;
            int heightScreen = (int) size.y;

            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();

            int virtualOriginX = (int) ((widthScreen - imageViewWidthINT + Xoffset) / 2);
            int virtualOriginY = (int) (heightScreen - imageViewHeightINT - Yoffset / 2);

            x0 = X - virtualOriginX;
            y0 = Y - virtualOriginY;

            double openCVratio = (double) bitmapHeight / imageViewHeight;

            x0final = (int) ((double) x0 * openCVratio);
            y0final = (int) ((double) y0 * openCVratio);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }

    if (event.getAction() == MotionEvent.ACTION_UP) {
        try {
            final ImageView imageView = (ImageView) findViewById(R.id.imageView);

            int X = (int) event.getX();
            int Y = (int) event.getY();

            int[] coordinates = new int[2];//{0,0};
            imageView.getLocationOnScreen(coordinates);
            int viewTop = coordinates[1];
            int viewBottom = coordinates[1] + imageView.getHeight();
            int viewLeft = coordinates[0];
            int viewRight = coordinates[0] + imageView.getWidth();

            imageViewHeight = (double) viewBottom - viewTop;
            imageViewWidth = aspectRatio * imageViewHeight;

            int imageViewWidthINT = (int) imageViewWidth;
            int imageViewHeightINT = (int) imageViewHeight;

            Display display = getWindowManager().getDefaultDisplay();
            android.graphics.Point size = new android.graphics.Point();
            display.getSize(size);
            int widthScreen = (int) size.x;
            int heightScreen = (int) size.y;

            int Yoffset = heightScreen - viewBottom;
            int Xoffset = widthScreen - imageView.getWidth();

            int virtualOriginX = (int) ((widthScreen - imageViewWidthINT + Xoffset) / 2);
            int virtualOriginY = (int) (heightScreen - imageViewHeightINT - Yoffset / 2);

            x1 = X - virtualOriginX;
            y1 = Y - virtualOriginY;

            double openCVratio = (double) bitmapHeight / imageViewHeight;

            x1final = (int) ((double) x1 * openCVratio);
            y1final = (int) ((double) y1 * openCVratio);

            bitmap = BitmapFactory.decodeFile(filePath);
            bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
            Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC3);
            Utils.bitmapToMat(bitmap, frame);
            rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
            Core.rectangle(frame, rect.tl(), rect.br(), color, 3);
            Utils.matToBitmap(frame, bitmap);
            imageView.setImageBitmap(bitmap);
        } catch (Exception e) {
            Log.e(TAG, "Touch events are not working!");
        }
    }

    return true;
}
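Both touch handlers scale touch coordinates (after subtracting the virtual origin) into bitmap/OpenCV pixel coordinates. A minimal sketch of that mapping, assuming the preview is scaled uniformly to the ImageView height (the helper name is hypothetical):

// Maps a touch point, already offset to the image origin, into OpenCV/bitmap pixel coordinates.
private static org.opencv.core.Point viewToBitmap(double xView, double yView,
        double imageViewHeight, double bitmapHeight) {
    double openCVratio = bitmapHeight / imageViewHeight;   // bitmap pixels per view pixel
    return new org.opencv.core.Point(xView * openCVratio, yView * openCVratio);
}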

From source file:cn.xiongyihui.webcam.setup.java

License:Open Source License

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_setup);

    this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

    final Button cameraButton = (Button) findViewById(R.id.cameraButton);
    final Button selectButton = (Button) findViewById(R.id.selectButton);
    final Button templateButton = (Button) findViewById(R.id.templateButton);
    final Button instructionButton = (Button) findViewById(R.id.instructionButton);
    final ImageView imageView = (ImageView) findViewById(R.id.imageView);

    try {
        int NUMBER_OF_CORES = Runtime.getRuntime().availableProcessors();

        Toast.makeText(this, String.valueOf(NUMBER_OF_CORES), Toast.LENGTH_SHORT).show();
    } catch (Exception e) {
        Log.e(TAG, "Processor-cores are not getting detected!");
    }

    try {
        final Toast toast = Toast.makeText(this,
                "Please capture image; \n" + "select image; \n"
                        + "Drag-and-drop, swipe on the desired region and confirm template!",
                Toast.LENGTH_LONG);
        final TextView v = (TextView) toast.getView().findViewById(android.R.id.message);
        instructionButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                if (v != null)
                    v.setGravity(Gravity.CENTER);
                toast.show();
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Instructions are not getting displayed!");
    }

    try {
        cameraButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
                startActivityForResult(intent, requestCode);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Camera is not working!");
    }

    try {
        selectButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                Intent i = new Intent(Intent.ACTION_PICK,
                        android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);

                startActivityForResult(i, requestCode);

                bitmap = BitmapFactory.decodeFile(filePath);
                imageView.setImageBitmap(bitmap);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Selection is not working!");
    }

    try {
        templateButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View arg0) {
                if (imageView.getDrawable() == null) {
                    Log.e(TAG, "Null ImageView!");
                }
                Log.e(TAG, "Button is working.");
                try {
                    bitmap = BitmapFactory.decodeFile(filePath);
                    bitmap = Bitmap.createScaledBitmap(bitmap, bitmapWidth, bitmapHeight, true);
                    Mat frame = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(bitmap, frame);

                    GlobalClass globalVariable = (GlobalClass) getApplicationContext();
                    globalVariable.setTemplateCapturedBitmapHeight(bitmapHeight);
                    globalVariable.setTemplateCapturedBitmapWidth(bitmapWidth);
                    Log.e(TAG, "Bitmap has been set successfully; Template is being generated!");

                    rect = new Rect(x0final, y0final, x1final - x0final, y1final - y0final);
                    Utils.matToBitmap(frame, bitmap);

                    if (x0final < x1final) {
                        x0display = x0final;
                        x1display = x1final;
                    }
                    if (x0final > x1final) {
                        x1display = x0final;
                        x0display = x1final;
                    }
                    if (y0final < y1final) {
                        y0display = y0final;
                        y1display = y1final;
                    }
                    if (y0final > y1final) {
                        y1display = y0final;
                        y0display = y1final;
                    }

                    long timeBegin = System.currentTimeMillis();

                    bitmap = Bitmap.createBitmap(bitmap, x0display, y0display, x1display - x0display,
                            y1display - y0display);

                    /*String path = Environment.getExternalStorageDirectory().toString();
                            
                    Log.e(TAG, "File is about to be written!");
                            
                    //File file = new File(path, "TraQuad");
                    //bitmap.compress(Bitmap.CompressFormat.PNG, 100, fOutputStream);
                            
                    //Log.e(TAG, "Stored image successfully!");
                    //fOutputStream.flush();
                    //fOutputStream.close();
                            
                    //MediaStore.Images.Media.insertImage(getContentResolver(), file.getAbsolutePath(), file.getName(), file.getName());*/

                    /*Prominent colors code; This is not working in Android; OpenCV assertion error
                    Log.e(TAG, "Retrieved image successfully!");
                            
                    Imgproc.medianBlur(frame, frame, 3);
                    Log.e(TAG, "Filtered image successfully!");
                            
                    try {
                    Mat mask = new Mat(bitmap.getWidth(), bitmap.getHeight(), CvType.CV_8UC1);
                    MatOfFloat range = new MatOfFloat(0f, 255f);
                    Mat hist = new Mat();
                    MatOfInt mHistSize = new MatOfInt(256);
                    List<Mat> lHsv = new ArrayList<Mat>(3);
                    Mat hsv = new Mat();
                    Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_RGB2HSV);
                    Core.split(frame, lHsv);
                    Mat mH = lHsv.get(0);
                    Mat mS = lHsv.get(1);
                    Mat mV = lHsv.get(2);
                    ArrayList<Mat> ListMat = new ArrayList<Mat>();
                    ListMat.add(mH);
                    Log.e(TAG, String.valueOf(ListMat));
                    MatOfInt channels = new MatOfInt(0, 1);
                    Imgproc.calcHist(Arrays.asList(mH), channels, mask, hist, mHistSize, range);
                    ListMat.clear();
                    }catch (Exception e){
                    Log.e(TAG, "Prominent colors are not getting detected!");
                    }*/

                    Mat colorFrame = frame.clone();

                    Utils.bitmapToMat(bitmap, frame);
                    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGB2GRAY);

                    Log.e(TAG, "Converted color successfully!");

                    int detectorType = FeatureDetector.ORB;
                    //int detectorType = FeatureDetector.SIFT; //SIFT and SURF are not working!
                    //int detectorType = FeatureDetector.SURF;
                    FeatureDetector featureDetector = FeatureDetector.create(detectorType);

                    Log.e(TAG, "Feature detection has begun!");

                    MatOfKeyPoint keypoints = new MatOfKeyPoint();

                    featureDetector.detect(frame, keypoints);

                    Log.e(TAG, "Feature detection has ended successfully!");

                    /*if (!featureDetector.empty()) {
                    //Draw the detected keypoints
                    int flagDraw = Features2d.NOT_DRAW_SINGLE_POINTS;
                    Features2d.drawKeypoints(frame, keypoints, frame, color, flagDraw);
                    Utils.matToBitmap(frame, bitmap);
                    }*/

                    imageView.setImageBitmap(bitmap);

                    Log.e(TAG, "Final bitmap has been loaded!");

                    KeyPoint[] referenceKeypoints = keypoints.toArray();

                    Log.e(TAG, "Number of keypoints detected is " + String.valueOf(referenceKeypoints.length));

                    int iterationMax = referenceKeypoints.length;
                    int iterate = 0;
                    double xFeaturePoint, yFeaturePoint;
                    double xSum = 0, ySum = 0;
                    double totalResponse = 0;
                    double keyPointResponse = 0;
                    double xTemplateCentroid = 0, yTemplateCentroid = 0;

                    DescriptorExtractor descriptorExtractor = DescriptorExtractor
                            .create(DescriptorExtractor.ORB);

                    Mat templateDescriptor = new Mat();

                    descriptorExtractor.compute(frame, keypoints, templateDescriptor);

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;

                        if (keyPointResponse > 0) {
                            xSum = xSum + keyPointResponse * xFeaturePoint;
                            ySum = ySum + keyPointResponse * yFeaturePoint;
                            totalResponse = totalResponse + keyPointResponse;

                            //Log.e(TAG, "Feature " + String.valueOf(iterate) + ":" + String.valueOf(referenceKeypoints[iterate]));
                        }
                    }

                    xTemplateCentroid = xSum / totalResponse;
                    yTemplateCentroid = ySum / totalResponse;
                    Log.e(TAG, "Finished conversion of features to points!");
                    Log.e(TAG, "Centroid location is: (" + xTemplateCentroid + "," + yTemplateCentroid + ")");

                    double xSquareDistance = 0, ySquareDistance = 0;
                    double distanceTemplateFeatures = 0;
                    int numberOfPositiveResponses = 0;

                    double[] colorValue;
                    double rSum = 0, gSum = 0, bSum = 0;
                    double rCentral, gCentral, bCentral;

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        xFeaturePoint = referenceKeypoints[iterate].pt.x;
                        yFeaturePoint = referenceKeypoints[iterate].pt.y;
                        keyPointResponse = referenceKeypoints[iterate].response;

                        colorValue = colorFrame.get((int) yFeaturePoint, (int) xFeaturePoint);
                        rSum = rSum + colorValue[0];
                        gSum = gSum + colorValue[1];
                        bSum = bSum + colorValue[2];

                        if (keyPointResponse > 0) {
                            xSquareDistance = xSquareDistance
                                    + (xFeaturePoint - xTemplateCentroid) * (xFeaturePoint - xTemplateCentroid);
                            ySquareDistance = ySquareDistance
                                    + (yFeaturePoint - yTemplateCentroid) * (yFeaturePoint - yTemplateCentroid);
                            numberOfPositiveResponses++;
                        }
                    }

                    rCentral = rSum / iterationMax;
                    gCentral = gSum / iterationMax;
                    bCentral = bSum / iterationMax;

                    double deltaColor = 21;

                    double rLow = rCentral - deltaColor;
                    double rHigh = rCentral + deltaColor;
                    double gLow = gCentral - deltaColor;
                    double gHigh = gCentral + deltaColor;
                    double bLow = bCentral - deltaColor;
                    double bHigh = bCentral + deltaColor;

                    Log.e(TAG, "Prominent color (R,G,B): (" + rCentral + "," + gCentral + "," + bCentral + ")");

                    distanceTemplateFeatures = Math
                            .sqrt((xSquareDistance + ySquareDistance) / numberOfPositiveResponses);

                    KeyPoint[] offsetCompensatedKeyPoints = keypoints.toArray();

                    double xMaxNormalisation, yMaxNormalisation;

                    xMaxNormalisation = x1display - x0display;
                    yMaxNormalisation = y1display - y0display;

                    for (iterate = 0; iterate < iterationMax; iterate++) {
                        offsetCompensatedKeyPoints[iterate].pt.x = offsetCompensatedKeyPoints[iterate].pt.x
                                / xMaxNormalisation;
                        offsetCompensatedKeyPoints[iterate].pt.y = offsetCompensatedKeyPoints[iterate].pt.y
                                / yMaxNormalisation;

                        //Log.e(TAG, "Compensated: (" + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.x) + "," + String.valueOf(offsetCompensatedKeyPoints[iterate].pt.y) + ")");
                    }

                    double xCentroidNormalised, yCentroidNormalised;

                    xCentroidNormalised = (xTemplateCentroid - x0display) / xMaxNormalisation;
                    yCentroidNormalised = (yTemplateCentroid - y0display) / yMaxNormalisation;

                    Log.e(TAG, "Normalised Centroid: (" + String.valueOf(xCentroidNormalised) + ","
                            + String.valueOf(yCentroidNormalised));

                    long timeEnd = System.currentTimeMillis();
                    Log.e(TAG, "Time consumed is " + String.valueOf(timeEnd - timeBegin) + " milliseconds!");

                    Log.e(TAG, "RMS distance is: " + distanceTemplateFeatures);

                    globalVariable.setDistanceTemplateFeatures(distanceTemplateFeatures);
                    globalVariable.setX0display(x0display);
                    globalVariable.setY0display(y0display);
                    globalVariable.setX1display(x1display);
                    globalVariable.setY1display(y1display);
                    globalVariable.setKeypoints(keypoints);
                    globalVariable.setXtemplateCentroid(xTemplateCentroid);
                    globalVariable.setYtemplateCentroid(yTemplateCentroid);
                    globalVariable.setTemplateDescriptor(templateDescriptor);
                    globalVariable.setNumberOfTemplateFeatures(iterationMax);
                    globalVariable.setNumberOfPositiveTemplateFeatures(numberOfPositiveResponses);
                    globalVariable.setRhigh(rHigh);
                    globalVariable.setRlow(rLow);
                    globalVariable.setGhigh(gHigh);
                    globalVariable.setGlow(gLow);
                    globalVariable.setBhigh(bHigh);
                    globalVariable.setBlow(bLow);
                    globalVariable.setXnormalisedCentroid(xCentroidNormalised);
                    globalVariable.setYnormalisedCentroid(yCentroidNormalised);
                    globalVariable.setNormalisedTemplateKeyPoints(offsetCompensatedKeyPoints);

                    Log.e(TAG, "Finished setting the global variables!");

                } catch (Exception e) {
                    Log.e(TAG, "Please follow instructions!");
                }
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Template is not working!");
    }

}
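The template handler above computes a response-weighted centroid of the detected ORB keypoints. A standalone sketch of that computation (a helper of this name is not part of the original activity):

// Weighted centroid of keypoints, using each keypoint's response as its weight.
private static org.opencv.core.Point weightedCentroid(KeyPoint[] keypoints) {
    double xSum = 0, ySum = 0, totalResponse = 0;
    for (KeyPoint kp : keypoints) {
        if (kp.response > 0) {                  // skip non-positive responses, as in the handler
            xSum += kp.response * kp.pt.x;
            ySum += kp.response * kp.pt.y;
            totalResponse += kp.response;
        }
    }
    return new org.opencv.core.Point(xSum / totalResponse, ySum / totalResponse);
}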

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

/**
 *  Create a camera intrinsic matrix using input parameters
 *
 *  The camera intrinsic matrix will be like:
 *
 *       +-                       -+
 *       |  f   0    center.width  |
 *   A = |  0   f    center.height |
 *       |  0   0         1        |
 *       +-                       -+
 *
 *  @return An approximated (not actually calibrated) camera matrix
 */
private static Mat cameraMatrix(float f, Size center) {
    final double[] data = { f, 0, center.width, 0, f, center.height, 0, 0, 1f };
    Mat m = new Mat(3, 3, CvType.CV_64F);
    m.put(0, 0, data);
    return m;
}
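A hedged usage sketch: the frame size and focal length below are illustrative guesses, not calibrated values, with the principal point placed at the image center.

Size frameSize = new Size(640, 480);
Size principalPoint = new Size(frameSize.width / 2, frameSize.height / 2);
Mat K = cameraMatrix(520.0f, principalPoint);   // focal length in pixels is only an estimate
Log.d("cameraMatrix", "K = " + K.dump());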

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

private static Mat quat2rpy(Mat quat) {
    double[] q = new double[4];
    quat.get(0, 0, q);

    double[] rpy = { Math.atan2(2 * (q[0] * q[1] + q[2] * q[3]), 1 - 2 * (q[1] * q[1] + q[2] * q[2])),
            Math.asin(2 * (q[0] * q[2] - q[3] * q[1])),
            Math.atan2(2 * (q[0] * q[3] + q[1] * q[2]), 1 - 2 * (q[2] * q[2] + q[3] * q[3])) };

    Mat rpym = new Mat(3, 1, CvType.CV_64F);
    rpym.put(0, 0, rpy);
    return rpym;
}
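For reference, a usage sketch with the identity quaternion (w, x, y, z) = (1, 0, 0, 0), which should map to roll = pitch = yaw = 0:

Mat quat = new Mat(4, 1, CvType.CV_64F);
quat.put(0, 0, new double[] { 1, 0, 0, 0 });   // identity rotation
Mat rpy = quat2rpy(quat);
Log.d("quat2rpy", rpy.dump());                 // expected: [0; 0; 0]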

From source file:com.android.cts.verifier.sensors.RVCVXCheckAnalyzer.java

License:Apache License

private static Mat rodr2quat(Mat rodr) {
    double t = Core.norm(rodr);
    double[] r = new double[3];
    rodr.get(0, 0, r);

    double[] quat = { Math.cos(t / 2), Math.sin(t / 2) * r[0] / t, Math.sin(t / 2) * r[1] / t,
            Math.sin(t / 2) * r[2] / t };
    Mat quatm = new Mat(4, 1, CvType.CV_64F);
    quatm.put(0, 0, quat);
    return quatm;
}
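Chaining the two helpers: a Rodrigues vector of magnitude pi/2 about the z-axis should produce a yaw of roughly pi/2 (the input values are illustrative):

Mat rodr = new Mat(3, 1, CvType.CV_64F);
rodr.put(0, 0, new double[] { 0, 0, Math.PI / 2 });   // 90-degree rotation about z
Mat rpy = quat2rpy(rodr2quat(rodr));
Log.d("rodr2quat", rpy.dump());                       // expected yaw close to pi/2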

From source file:com.astrocytes.core.ImageHelper.java

License:Open Source License

public static Mat convertBufferedImageToMat(BufferedImage in) {
    byte[] pixels = ((DataBufferByte) in.getRaster().getDataBuffer()).getData();
    int type = CvType.CV_8UC3;

    if (in.getType() == BufferedImage.TYPE_BYTE_GRAY) {
        type = CvType.CV_8UC1;
    }

    Mat out = new Mat(in.getHeight(), in.getWidth(), type);
    out.put(0, 0, pixels);

    return out;
}
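A hedged usage sketch: the helper casts the raster to DataBufferByte, so it assumes a byte-backed image such as TYPE_3BYTE_BGR (what ImageIO typically returns for JPEGs) or TYPE_BYTE_GRAY; int-backed types like TYPE_INT_RGB are not handled. The file path is illustrative, and javax.imageio.ImageIO, java.io.File and java.awt.image.BufferedImage are assumed to be imported.

BufferedImage image = ImageIO.read(new File("input.jpg"));  // byte-backed TYPE_3BYTE_BGR
Mat mat = convertBufferedImageToMat(image);
System.out.println("Converted Mat: " + mat.rows() + "x" + mat.cols());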