Example usage for org.opencv.core Mat width

List of usage examples for org.opencv.core Mat width

Introduction

On this page you can find example usages of org.opencv.core Mat width().

Prototype

public int width() 

Source Link

Usage

From source file:nz.ac.auckland.lablet.vision.CamShiftTracker.java

License: Open Source License

/**
 * Saves a Mat-based image to /sdcard/ for debugging.
 *
 * @param frame The OpenCV frame to persist.
 * @param name  The name of the file (without a file type).
 */
public void saveFrame(Mat frame, String name) {
    // Convert the Mat into an ARGB bitmap of matching dimensions, then
    // delegate to the Bitmap-based overload for the actual disk write.
    Bitmap converted = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(frame, converted);
    this.saveFrame(converted, name);
}

From source file:opencltest.YetAnotherTestT.java

public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // 3x3 cross-shaped structuring element (center row and column set to 1).
    Mat kernel = new Mat(3, 3, CV_8UC1);
    kernel.put(0, 0, new double[] { 0, 1, 0, 1, 1, 1, 0, 1, 0 });

    Mat source = Imgcodecs.imread("test.smaller.png");
    Mat blur = new Mat();
    Mat edges = new Mat();
    Mat dilated = new Mat();

    // Smooth the input, then extract Canny edges from the blurred image.
    Imgproc.GaussianBlur(source, blur, new Size(13, 13), 0);
    Imgproc.Canny(blur, edges, 10, 30, 5, false);
    //        Imgproc.cvtColor(edges, edges, Imgproc.COLOR_RGB2GRAY);
    //        Imgproc.adaptiveThreshold(edges, edges, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY, 5, 2);
    //        Core.bitwise_not(edges, edges);
    //        Imgproc.dilate(edges, edges, kernel);
    //        Imgproc.dilate(edges, dilated, kernel);
    dilated = edges; // dilation experiments above are disabled; use the raw edge map
    //        Core.bitwise_not(edges, edges);

    Mat lines = new Mat();
    Imgproc.HoughLinesP(dilated, lines, 1, Math.PI / 180, 300, 10, 70);

    Mat empty = new Mat(source.height(), source.width(), source.type());
    //        paintLines(empty, lines);

    // Find the outer contours of the edge image.
    List<MatOfPoint> contours = new ArrayList<>();
    Mat hier = new Mat();
    Imgproc.findContours(edges, contours, hier, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    Mat foundSquare = new Mat(source.height(), source.width(), CvType.CV_8UC4);
    source.copyTo(foundSquare);

    // Keep contours whose bounding box is roughly 40x40 px (tolerance tr);
    // record each kept box's horizontal center in `hor`.
    List<Double> hor = new ArrayList<>();
    for (Iterator<MatOfPoint> iterator = contours.iterator(); iterator.hasNext();) {
        MatOfPoint next = iterator.next();
        Rect bounding = Imgproc.boundingRect(next);
        int tr = 20;
        if (diffLessThan(bounding.size().width - 40, tr) && diffLessThan(bounding.size().height - 40, tr)) {
            Imgproc.rectangle(empty, bounding.tl(), bounding.br(), randomColor(), 3);
            //                hor.add(bounding.x + 0.0);
            hor.add(bounding.x + bounding.width / 2.0 + 0.0);
            drawRect(bounding, foundSquare);
        }
    }

    Imgcodecs.imwrite("test_2.png", source);
    Imgcodecs.imwrite("test_3.png", dilated);
    Imgcodecs.imwrite("test_4.png", empty);
    Imgcodecs.imwrite("test_h.png", foundSquare);

    // Kernel density estimate over the detected x-centers, using an
    // Epanechnikov kernel with bandwidth h.
    hor.sort(Double::compare);
    double low = hor.get(0); // NOTE(review): throws IndexOutOfBoundsException when no contour matched
    double hih = hor.get(hor.size() - 1);
    double n = hor.size();
    Function<Double, Double> K = (d) -> (Math.abs(d) <= 1) ? ((3.0 / 4.0) * (1 - (d * d))) : 0;//epanechnikov kernel
    List<Double> result = new ArrayList<>();
    double h = 10;
    for (int i = 0; i < source.width() + 1; i++)
        result.add(0.0);
    for (double d = low; d <= hih; d += 1) {
        double sum = 0;
        for (Double di : hor) {
            sum += K.apply((d - di) / h);
        }
        // (int) d stays in range because the centers lie within the image width.
        result.set((int) d, sum / (n * h));
        System.out.println(sum / (n * h));
    }
    normalize(result, 255);
    Mat test = new Mat(source.height(), source.width(), source.type());
    source.copyTo(test);
    draw(result, test);
    Imgcodecs.imwrite("test_uwot.png", test);
}

From source file:opencltest.YetAnotherTestT.java

/**
 * Paints a full-image crosshair through (x, y) onto the given image.
 *
 * @param x           Column of the vertical line.
 * @param y           Row of the horizontal line.
 * @param foundSquare Image the crosshair is drawn onto (modified in place).
 */
private static void drawCross(double x, double y, Mat foundSquare) {
    Scalar color = new Scalar(255, 0, 0, 180);
    // Vertical line spanning the full image height at column x.
    Point verticalTop = new Point(x, 0);
    Point verticalBottom = new Point(x, foundSquare.height());
    Imgproc.line(foundSquare, verticalTop, verticalBottom, color, 1);
    // Horizontal line spanning the full image width at row y.
    Point horizontalLeft = new Point(0, y);
    Point horizontalRight = new Point(foundSquare.width(), y);
    Imgproc.line(foundSquare, horizontalLeft, horizontalRight, color, 1);
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Converts an OpenCV Mat to an Android Bitmap.
 *
 * @param mat the source matrix; must be convertible by Utils.matToBitmap
 * @return the converted bitmap, or null if the conversion fails
 */
public static Bitmap makeBitmap(@NonNull Mat mat) {
    try {
        Bitmap bitmap = Bitmap.createBitmap(mat.width(), mat.height(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mat, bitmap);

        // The previous implementation finished with
        // Bitmap.createScaledBitmap(bitmap, mat.width(), mat.height(), false),
        // but scaling a bitmap to its own dimensions is a no-op (Android
        // returns the source bitmap unchanged), so the call was removed.
        return bitmap;
    } catch (Exception e) {
        // Conversion can fail for unsupported Mat types; log and signal
        // failure to the caller via null, as before.
        Timber.e(e);
    }
    return null;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Prepares the strip image for display: converts it from Lab to RGB, resizes
 * it to fit the available width, adds a white border, and (for non-grouped
 * strips) draws a small arrow pointing at the measured patch.
 *
 * @param mat         the strip image in Lab color space (modified in place)
 * @param centerPatch center of the patch the arrow should point at
 * @param grouped     when true, no arrow is drawn
 * @param maxWidth    maximum width available for the strip
 * @return the same Mat, converted, resized and bordered
 */
@NonNull
public static Mat createStripMat(@NonNull Mat mat, @NonNull Point centerPatch, boolean grouped, int maxWidth) {
    //done with lab schema, make rgb to show in image view
    // mat holds the strip image
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_Lab2RGB);

    int rightBorder = 0;

    double ratio;
    if (mat.width() < MIN_STRIP_WIDTH) {
        // NOTE(review): this branch scales by a height-based ratio; if both
        // MAX_STRIP_HEIGHT and height() are ints this is integer division —
        // confirm MAX_STRIP_HEIGHT is declared as a double.
        ratio = MAX_STRIP_HEIGHT / mat.height();
        rightBorder = BORDER_SIZE; // narrow strips get extra right padding
    } else {
        ratio = (double) (maxWidth - 10) / (double) mat.width();
    }

    Imgproc.resize(mat, mat, new Size(mat.width() * ratio, mat.height() * ratio));

    // White constant border: room for the arrow on top, padding below.
    Core.copyMakeBorder(mat, mat, BORDER_SIZE + ARROW_TRIANGLE_HEIGHT, BORDER_SIZE * 2, 0, rightBorder,
            Core.BORDER_CONSTANT,
            new Scalar(MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE));

    // Draw an arrow to the patch
    // only draw if this is not a 'grouped' strip
    if (!grouped) {
        double x = centerPatch.x * ratio;
        double y = 0;
        // Downward-pointing filled triangle centered on the (scaled) patch x.
        MatOfPoint matOfPoint = new MatOfPoint(new Point((x - ARROW_TRIANGLE_LENGTH), y),
                new Point((x + ARROW_TRIANGLE_LENGTH), y), new Point(x, y + ARROW_TRIANGLE_HEIGHT),
                new Point((x - ARROW_TRIANGLE_LENGTH), y));

        Imgproc.fillConvexPoly(mat, matOfPoint, DARK_GRAY, Core.LINE_AA, 0);
    }
    return mat;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Create a Mat with swatches for the colors in the color chart range and also
 * write each swatch's value beneath it.
 *
 * @param colors the colors to draw, as a JSON array of objects holding a
 *               numeric value and a Lab color triple
 * @param width  the final width of the Mat
 * @return the created Mat
 */
@NonNull
public static Mat createColorRangeMatSingle(@NonNull JSONArray colors, int width) {

    // With many colors, shrink the gutter between swatches and reclaim a
    // little width so everything still fits.
    double gutterWidth = X_MARGIN;
    if (colors.length() > 10) {
        gutterWidth = 2d;
        width -= 10;
    }

    // Horizontal space allotted to each swatch (rectangle + gutter).
    double xTranslate = (double) width / (double) colors.length();

    Mat colorRangeMat = new Mat((int) xTranslate + MEASURE_LINE_HEIGHT, width, CvType.CV_8UC3, LAB_WHITE);

    double previousPos = 0;
    for (int d = 0; d < colors.length(); d++) {
        try {

            JSONObject colorObj = colors.getJSONObject(d);

            double value = colorObj.getDouble(SensorConstants.VALUE);
            JSONArray lab = colorObj.getJSONArray(SensorConstants.LAB);
            // Convert the chart's Lab values to OpenCV's 8-bit Lab encoding:
            // L scaled from 0..100 to 0..255, a/b offset by +128.
            Scalar scalarLab = new Scalar((lab.getDouble(0) / 100) * MAX_RGB_INT_VALUE, lab.getDouble(1) + 128,
                    lab.getDouble(2) + 128);

            //draw a rectangle filled with color for result value
            Point topLeft = new Point(xTranslate * d, Y_COLOR_RECT);
            Point bottomRight = new Point(topLeft.x + xTranslate - gutterWidth, Y_COLOR_RECT + xTranslate);
            Imgproc.rectangle(colorRangeMat, topLeft, bottomRight, scalarLab, -1);

            Size textSizeValue = Imgproc.getTextSize(DECIMAL_FORMAT.format(value), Core.FONT_HERSHEY_SIMPLEX,
                    NORMAL_FONT_SCALE, 2, null);
            // Center the label under its swatch.
            double x = topLeft.x + (bottomRight.x - topLeft.x) / 2 - textSizeValue.width / 2;
            //draw color value below rectangle. Skip if too close to the previous label
            if (x > previousPos + MIN_COLOR_LABEL_WIDTH || d == 0) {
                previousPos = x;

                String label;
                //no decimal places if too many labels to fit
                if (colors.length() > 10) {
                    label = String.format(Locale.getDefault(), "%.0f", value);
                } else {
                    label = DECIMAL_FORMAT.format(value);
                }

                //adjust x if too close to edge
                if (x + textSizeValue.width > colorRangeMat.width()) {
                    x = colorRangeMat.width() - textSizeValue.width;
                }

                Imgproc.putText(colorRangeMat, label,
                        new Point(x, Y_COLOR_RECT + xTranslate + textSizeValue.height + BORDER_SIZE),
                        Core.FONT_HERSHEY_SIMPLEX, NORMAL_FONT_SCALE, LAB_GREY, 2, Core.LINE_AA, false);
            }

        } catch (JSONException e) {
            // Malformed entries are logged and skipped; remaining swatches still draw.
            Timber.e(e);
        }
    }
    return colorRangeMat;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Extracts a square patch of the image centered on patchCenter, clamped to
 * the image bounds, as an independent copy.
 *
 * @param mat         source image
 * @param patchCenter center of the requested patch
 * @param subMatSize  half-size of the patch in pixels
 * @return a cloned submatrix covering the clamped patch region
 */
public static Mat getPatch(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {
    // Clamp the patch rectangle so it never leaves the image.
    int rowStart = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int rowEnd = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int colStart = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int colEnd = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    // Clone so the caller owns pixels independent of the source Mat.
    return mat.submat(rowStart, rowEnd, colStart, colEnd).clone();
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

/**
 * Samples the color of a patch by taking a clamped square region around its
 * center and handing it to the strip-patch color detector.
 *
 * @param mat         source image
 * @param patchCenter center of the patch to sample
 * @param subMatSize  half-size of the sampled region in pixels
 * @return the detected color of the patch region
 */
public static ColorDetected getPatchColor(@NonNull Mat mat, @NonNull Point patchCenter, int subMatSize) {
    // Clamp the sampling rectangle to the image bounds.
    int rowStart = (int) Math.round(Math.max(patchCenter.y - subMatSize, 0));
    int rowEnd = (int) Math.round(Math.min(patchCenter.y + subMatSize, mat.height()));
    int colStart = (int) Math.round(Math.max(patchCenter.x - subMatSize, 0));
    int colEnd = (int) Math.round(Math.min(patchCenter.x + subMatSize, mat.width()));

    // View into the source (no clone needed — detector only reads it).
    Mat region = mat.submat(rowStart, rowEnd, colStart, colEnd);

    // Compute and return the representative color of the region.
    return OpenCVUtil.detectStripPatchColor(region);
}

From source file:org.designosaurs.opmode.DesignosaursAuto.java

/**
 * Main autonomous entry point: initializes hardware, Vuforia and OpenCV,
 * waits for team-color selection, then runs the beacon-scoring state machine
 * (position -> search -> align -> wait-for-placer) until the op mode ends.
 */
@Override
public void runOpMode() {
    appContext = hardwareMap.appContext;
    long startTime;

    if (TEST_MODE) {
        DesignosaursHardware.hardwareEnabled = false;

        Log.i(TAG, "*** TEST MODE ENABLED ***");
        Log.i(TAG, "Hardware is disabled, skipping to beacon search state.");
        Log.i(TAG, "Web debugging interface can be found at http://"
                + DesignosaursUtils.getIpAddress(appContext) + ":9001/");

        autonomousState = STATE_SEARCHING;
    } // end of TEST_MODE setup

    setInitState("Configuring hardware...");
    robot.init(hardwareMap);

    setInitState("Initializing vuforia...");
    VuforiaLocalizer.Parameters params = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    params.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    params.vuforiaLicenseKey = VUFORIA_LICENCE_KEY;

    vuforia = new VuforiaLocalizerImplSubclass(params);

    // Vuforia tracks the images, we'll call them beacons
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("wheels");
    beacons.get(1).setName("tools");
    beacons.get(2).setName("legos");
    beacons.get(3).setName("gears");

    setInitState("Initializing OpenCV...");
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, appContext, mLoaderCallback);

    setInitState("Select team color using the gamepad.");

    // Poll gamepad for team color until the match starts (or is aborted).
    while (!isStarted() && !isStopRequested()) {
        updateTeamColor();

        robot.waitForTick(25);
    }

    if (DesignosaursHardware.hardwareEnabled) {
        buttonPusherManager.start();
        buttonPusherManager.setStatus(ButtonPusherManager.STATE_HOMING);
        shooterManager.start();
        shooterManager.setStatus(ShooterManager.STATE_AT_BASE);
    }

    if (HOTEL_MODE)
        setState(STATE_SEARCHING);

    beacons.activate();
    startTime = System.currentTimeMillis();
    robot.startTimer();

    long ticksSeeingImage = 0;
    // Main control loop: runs one tick of vision + state machine per pass.
    while (opModeIsActive()) {
        String beaconName = "";

        // Detect and process images
        for (VuforiaTrackable beac : beacons) {
            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beac.getListener()).getRawPose();

            if (pose != null) {
                beaconName = beac.getName();

                if (beac.getName().equals(lastScoredBeaconName)) // fixes seeing the first beacon and wanting to go back to it
                    continue;

                // Keep the raw 3x4 pose for later point projection.
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);
                lastPose = rawPose;

                recalculateCriticalPoints();
                boundPoints();
                centeredPos = center.y; // drive routines align based on this
            }
        }

        // Stream a downscaled camera frame to the web debugger at most every 50 ms.
        if (vuforia.rgb != null && ENABLE_CAMERA_STREAMING
                && System.currentTimeMillis() > (lastFrameSentAt + 50)) {
            lastFrameSentAt = System.currentTimeMillis();

            Bitmap bm = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bm.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            Bitmap resizedbitmap = DesignosaursUtils.resize(bm, bm.getWidth() / 2, bm.getHeight() / 2);
            FtcRobotControllerActivity.webServer.streamCameraFrame(resizedbitmap);

            if (center != null) {
                ArrayList<String> coords = new ArrayList<>(4);
                coords.add(start.toString());
                coords.add(end.toString());
                coords.add(center.toString());

                FtcRobotControllerActivity.webServer.streamPoints(coords);
            }
        }

        // Autonomous state machine.
        switch (autonomousState) {
        case STATE_INITIAL_POSITIONING:
            robot.startOrientationTracking();

            if (teamColor == TEAM_BLUE) {
                robot.rightMotor.setDirection(DcMotor.Direction.REVERSE);
                robot.leftMotor.setDirection(DcMotor.Direction.FORWARD);
            }

            // Red and blue alliances mirror each other's opening moves.
            if (teamColor == TEAM_RED) {
                robot.accel(0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.waitForTick(1400);
                robot.accel(0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.2, 0.8);

                updateRunningState("Initial turn...");
                robot.turn(-34, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(2.8, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(38, 0.2);
            } else {
                robot.goStraight(-0.3, 0.5);
                shooterManager.setStatus(ShooterManager.STATE_SCORING);
                robot.setDrivePower(0);
                robot.accel(-0.3, 0.5);
                robot.waitForTick(1400);
                robot.goStraight(-0.8, 0.4);
                robot.setDrivePower(0);

                robot.turn(179, 0.45);

                updateRunningState("Initial turn...");
                robot.turn(22, 0.3);

                updateRunningState("Secondary move...");
                robot.accel(0.5, FAST_DRIVE_POWER);
                robot.goStraight(4.1, FAST_DRIVE_POWER);
                robot.decel(0.5, 0);

                updateRunningState("Secondary turn...");
                robot.turn(-35, 0.3);
            }

            robot.setDrivePower(0);
            // Allow the camera time to focus:
            robot.waitForTick(1000);
            setState(STATE_SEARCHING);
            break;
        case STATE_SEARCHING:
            if (Math.abs(getRelativePosition()) < BEACON_ALIGNMENT_TOLERANCE) {
                stateMessage = "Analysing beacon data...";
                robot.setDrivePower(0);
                ticksSeeingImage++;
                vuforia.disableFlashlight();

                // Wait a few ticks so the camera frame is stable before analysis.
                if (ticksSeeingImage < 5)
                    continue;

                byte pass = 0;
                boolean successful = false;
                BeaconPositionResult lastBeaconPosition = null;
                int[] range = { 0, 500 };
                Mat image = null;

                // Retry detection, nudging the robot between passes, until
                // the beacon finder returns a conclusive position.
                while (!successful) {
                    image = getRegionAboveBeacon();
                    if (image == null || vuforia.rgb == null) {
                        Log.w(TAG, "No frame! _");
                        robot.setDrivePower(MIN_DRIVE_POWER);

                        continue;
                    }

                    lastBeaconPosition = beaconFinder.process(System.currentTimeMillis(), image, SAVE_IMAGES)
                            .getResult();
                    range = lastBeaconPosition.getRangePixels();

                    if (range[0] < 0)
                        range[0] = 0;

                    if (range[1] > image.width())
                        range[1] = image.width();

                    Log.i(TAG, "Beacon finder results: " + lastBeaconPosition.toString());

                    if (lastBeaconPosition.isConclusive())
                        successful = true;
                    else {
                        pass++;

                        Log.i(TAG, "Searching for beacon presence, pass #" + beaconsFound + ":" + pass + ".");

                        // We can't see both buttons, so move back and forth and run detection algorithm again
                        if (pass <= 3)
                            robot.goCounts(teamColor == TEAM_RED ? -400 : 400, 0.2);

                        if (pass <= 4 && pass >= 3)
                            robot.goCounts(teamColor == TEAM_RED ? 1000 : -1000, 0.2);

                        if (pass > 4) {
                            robot.goCounts(teamColor == TEAM_RED ? -1600 : 1600, 0.4);
                            successful = true;
                        }

                        // Allow camera time to autofocus:
                        robot.setDrivePower(0);
                        robot.waitForTick(100);
                    }
                }

                if (autonomousState != STATE_SEARCHING)
                    continue;

                // Change the values in the following line for how much off the larger image we crop (y-wise,
                // the x axis is controlled by where the robot thinks the beacon is, see BeaconFinder).
                // TODO: Tune this based on actual field
                Log.i(TAG, "Source image is " + image.height() + "px by " + image.width() + "px");

                int width = range[1] - range[0];
                Log.i(TAG, "X: " + range[0] + " Y: 0, WIDTH: " + width + ", HEIGHT: "
                        + (image.height() > 50 ? image.height() - 50 : image.height()));

                if (range[0] < 0)
                    range[0] = 0;

                if (width < 0)
                    width = image.width() - range[0];

                // Crop to the detected beacon region, then halve its size for processing.
                Mat croppedImageRaw = new Mat(image,
                        new Rect(range[0], 0, width,
                                image.height() > 50 ? image.height() - 50 : image.height())),
                        croppedImage = new Mat();

                Imgproc.resize(croppedImageRaw, croppedImage, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR);

                // Optionally blank the middle strip so only the two button halves are classified.
                if (OBFUSCATE_MIDDLE)
                    Imgproc.rectangle(croppedImage, new Point((croppedImage.width() / 2) - 35, 0),
                            new Point((croppedImage.width() / 2) + 55, croppedImage.height()),
                            new Scalar(255, 255, 255, 255), -1);

                BeaconColorResult lastBeaconColor = beaconProcessor
                        .process(System.currentTimeMillis(), croppedImage, SAVE_IMAGES).getResult();
                BeaconColorResult.BeaconColor targetColor = (teamColor == TEAM_RED
                        ? BeaconColorResult.BeaconColor.RED
                        : BeaconColorResult.BeaconColor.BLUE);

                Log.i(TAG, "*** BEACON FOUND ***");
                Log.i(TAG, "Target color: "
                        + (targetColor == BeaconColorResult.BeaconColor.BLUE ? "Blue" : "Red"));
                Log.i(TAG, "Beacon colors: " + lastBeaconColor.toString());

                // Beacon is already the right color:
                if (lastBeaconColor.getLeftColor() == targetColor
                        && lastBeaconColor.getRightColor() == targetColor) {
                    robot.accel(0.5, 1);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.2 : 0.7, 1);
                    robot.decel(0.5, DRIVE_POWER);
                    advanceToSecondBeacon(beaconName);

                    continue;
                }

                // Both sides of the beacon are the wrong color, so just score the left side:
                if (lastBeaconColor.getLeftColor() != targetColor
                        && lastBeaconColor.getRightColor() != targetColor) {
                    targetSide = SIDE_LEFT;
                    robot.setDrivePower(-DRIVE_POWER * 0.75);
                    setState(STATE_ALIGNING_WITH_BEACON);

                    continue;
                }

                // TODO: Replace goStraight call with proper combined distance from beacon offset + target side offset
                robot.goStraight(lastBeaconPosition.getOffsetFeet(), DRIVE_POWER);
                robot.setDrivePower(0);

                robot.resetDriveEncoders();
                targetSide = lastBeaconColor.getLeftColor() == targetColor ? SIDE_LEFT : SIDE_RIGHT;
                robot.setDrivePower(-DRIVE_POWER * 0.75);

                setState(STATE_ALIGNING_WITH_BEACON);
            } else if (Math.abs(getRelativePosition()) < SLOW_DOWN_AT) {
                stateMessage = "Beacon seen, centering (" + String.valueOf(getRelativePosition()) + ")...";
                robot.resetDriveEncoders();

                // Creep toward the beacon center; direction depends on sign of offset.
                if (getRelativePosition() > 0 && getRelativePosition() != Integer.MAX_VALUE)
                    robot.setDrivePower(-MIN_DRIVE_POWER);
                else
                    robot.setDrivePower(MIN_DRIVE_POWER);
            }
            break;
        case STATE_ALIGNING_WITH_BEACON:
            stateMessage = "Positioning to deploy placer...";

            // Safety cutoff if alignment takes too long.
            if (ticksInState > 450)
                robot.emergencyStop();

            double targetCounts = (targetSide == SIDE_LEFT) ? 1150 : 250;

            if (Math.max(Math.abs(robot.getAdjustedEncoderPosition(robot.leftMotor)),
                    Math.abs(robot.getAdjustedEncoderPosition(robot.rightMotor))) >= targetCounts) {
                Log.i(TAG, "//// DEPLOYING ////");

                robot.setDrivePower(0);
                robot.startOrientationTracking(true);
                buttonPusherManager.setStatus(ButtonPusherManager.STATE_SCORING);

                setState(STATE_WAITING_FOR_PLACER);
            }
            break;
        case STATE_WAITING_FOR_PLACER:
            stateMessage = "Waiting for placer to deploy.";

            if ((buttonPusherManager.getStatus() != ButtonPusherManager.STATE_SCORING
                    && buttonPusherManager.getTicksInState() >= 10)
                    || buttonPusherManager.getStatus() == ButtonPusherManager.STATE_AT_BASE) {
                advanceToSecondBeacon(beaconName);

                if (beaconsFound == 2 || HOTEL_MODE)
                    setState(STATE_FINISHED);
                else {
                    // Head for the second beacon; turns mirror by alliance color.
                    robot.turn(teamColor == TEAM_RED ? -3 : 3, 0.2);
                    robot.accel(0.5, 1);
                    robot.turn(teamColor == TEAM_RED ? 2 : -2, 0.2);
                    robot.goStraight(targetSide == SIDE_LEFT ? 1.5 : 1, 1);
                    robot.decel(0.5, DRIVE_POWER);

                    setState(STATE_SEARCHING);
                }
            }
        }

        ticksInState++;
        updateRunningState();
        robot.waitForTick(20);
    }
}

From source file:org.firstinspires.ftc.teamcode.opmodes.demo.VuforiaNavigationTest1.java

License:Open Source License

/**
 * Called repeatedly while the op mode runs; pulls the newest Vuforia camera
 * frame (if any) and reports the resulting Mat's dimensions via telemetry.
 */
@Override
public void loop() {
    VuforiaLocalizer.CloseableFrame frame;
    mVLib.loop(true); // update location info and do debug telemetry
    if (!mVLib.getFrames().isEmpty()) {
        frame = mVLib.getFrames().poll();

        Image img = frame.getImage(0);

        ByteBuffer buf = img.getPixels();

        // NOTE(review): new Mat() allocates an empty 0x0 matrix, so the put()
        // below copies nothing and width()/height() will report 0. The Mat
        // should be constructed with the image's dimensions and pixel type
        // (e.g. CvType.CV_8UC2 if the frame is RGB565 — TODO confirm format)
        // before copying pixels in.
        // NOTE(review): buf.array() throws UnsupportedOperationException for
        // direct byte buffers; copying via buf.get(byte[]) would be safer.
        Mat m = new Mat();
        m.put(0, 0, buf.array());
        telemetry.addData("Mat Width:", m.width());
        telemetry.addData("Mat Height:", m.height());
    }

}