Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usages of org.opencv.core Mat.rows().

Prototype

public int rows() 
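
For orientation before the examples, here is a minimal sketch of the usual rows()/cols() pairing: rows() is the image height in pixels and cols() the width, so together they bound a per-pixel loop. This sketch assumes an OpenCV 3.x-style build (where imread lives in org.opencv.imgcodecs.Imgcodecs) and uses a placeholder file path.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatRowsExample {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // "input.png" is a placeholder path
        Mat gray = Imgcodecs.imread("input.png", Imgcodecs.IMREAD_GRAYSCALE);
        for (int row = 0; row < gray.rows(); row++) {     // rows() == image height
            for (int col = 0; col < gray.cols(); col++) { // cols() == image width
                double[] pixel = gray.get(row, col);      // one value per channel
                // ... inspect pixel[0]
            }
        }
    }
}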

Usage

From source file:org.firstinspires.ftc.teamcode.VuforiaMovement.java

public void runOpMode() throws InterruptedException {
    frontRightMotor = hardwareMap.dcMotor.get("frontRightMotor");
    backRightMotor = hardwareMap.dcMotor.get("backRightMotor");
    frontLeftMotor = hardwareMap.dcMotor.get("frontLeftMotor");
    backLeftMotor = hardwareMap.dcMotor.get("backLeftMotor");
    rollerMotor = hardwareMap.dcMotor.get("rollerMotor");

    backRightMotor.setDirection(DcMotor.Direction.REVERSE);
    backLeftMotor.setDirection(DcMotor.Direction.REVERSE);

    colorDetector = new ColorBlobDetector();

    VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId);
    parameters.vuforiaLicenseKey = "ATjJBiD/////AAAAGc0JoedLjk5flVb2gExO3UVJCpOq5U4cyH9czcMyX5C8h+1AWXo7A0CU24r/IVeoC+7Te9zwJkX6IjHv5c77UNqrsyerM7pbjywj6/2NlzSUwb3jtEd9APhY5cOoSibb5NDRFM9beUWt0k4HuFMaw5OIZRs5YWge7KaJt5SzhqEFMQ6Loo8eugB9BBbPfuV3d7u4sQZBAKeRsR9mmnfvFJTUHHgcPlALU/rJBgw40AeFFvChjzNhwtlWYymeM/0173jH7JB2dyhoNtn/9byIUQzMw8KtaXbD3IfFJySLgJWmYjaA7cKdboL0nvkOoZNFMm2yqenbUDe/CEIMkhAsKjS3sgX4t6Fq+8gkhSnOS/Vd";
    parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
    this.vuforia = new VuforiaLocalizerImplSubclass(parameters);

    Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 4);
    VuforiaTrackables beacons = vuforia.loadTrackablesFromAsset("FTC_2016-17");
    beacons.get(0).setName("Wheels");
    beacons.get(1).setName("Tools");
    beacons.get(2).setName("Legos");
    beacons.get(3).setName("Gears");

    float mmPerInch = 25.4f;
    float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
    float mmFTCFieldWidth = (12 * 12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
    float mmVisionTargetZOffset = 5.75f * mmPerInch;

    // Initialize the location of the targets and phone on the field
    OpenGLMatrix wheelsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(mmFTCFieldWidth / 12, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(0).setLocation(wheelsTargetLocationOnField);
    RobotLog.ii(TAG, "Wheels Target=%s", format(wheelsTargetLocationOnField));

    OpenGLMatrix toolsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, mmFTCFieldWidth / 4, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(1).setLocation(toolsTargetLocationOnField);
    RobotLog.ii(TAG, "Tools Target=%s", format(toolsTargetLocationOnField));

    OpenGLMatrix legosTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the Blue Audience wall.
            Our translation here is a positive translation in Y.*/
            .translation(-mmFTCFieldWidth / 4, mmFTCFieldWidth / 2, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 0, 0));
    beacons.get(2).setLocation(legosTargetLocationOnField);
    RobotLog.ii(TAG, "Legos Target=%s", format(legosTargetLocationOnField));

    OpenGLMatrix gearsTargetLocationOnField = OpenGLMatrix
            /* Then we translate the target off to the RED WALL. Our translation here
            is a negative translation in X.*/
            .translation(-mmFTCFieldWidth / 2, -mmFTCFieldWidth / 12, mmVisionTargetZOffset)
            .multiplied(Orientation.getRotationMatrix(
                    /* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
                    AxesReference.EXTRINSIC, AxesOrder.XZX, AngleUnit.DEGREES, 90, 90, 0));
    beacons.get(3).setLocation(gearsTargetLocationOnField);
    RobotLog.ii(TAG, "Gears Target=%s", format(gearsTargetLocationOnField));

    OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix.translation(mmBotWidth / 2, 0, 0).multiplied(Orientation
            .getRotationMatrix(AxesReference.EXTRINSIC, AxesOrder.YZY, AngleUnit.DEGREES, -90, 0, 0));
    RobotLog.ii(TAG, "Phone=%s", format(phoneLocationOnRobot));

    ((VuforiaTrackableDefaultListener) beacons.get(0).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(1).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(2).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);
    ((VuforiaTrackableDefaultListener) beacons.get(3).getListener()).setPhoneInformation(phoneLocationOnRobot,
            parameters.cameraDirection);

    /** Wait for the game to begin */
    telemetry.addData(">", "Press Play to start tracking");
    telemetry.addData("OpenCV", Core.NATIVE_LIBRARY_NAME);
    telemetry.update();
    waitForStart();

    /** Start tracking the data sets we care about. */
    beacons.activate();

    hitRed = true;
    isButtonHit = false;
    directionFoundInARow = 0;
    directionToHit = "";

    movingToFirstBeacon = false;
    liningUpWithFirstBeacon = false;
    movingToSecondBeacon = false;
    liningUpWithSecondBeacon = false;
    while (opModeIsActive()) {
        String visibleTarget = "";
        Mat img = null;
        Mat croppedImg = null;
        Point beaconImageCenter = null;

        if (movingToFirstBeacon) {
            // TODO Estimate distance to the beacon from a point TBD
            // TODO Estimate distance to move forward and turn to face the beacon until second movement set
            // Move this outside the loop?

            // Move forward until you see the beacon
            while (movingToFirstBeacon) {
                // Move in increments to minimize the times you check the trackables
                // Note: this loop just re-issues the same power command 50 times;
                // it does not wait between iterations, so it is not a timed increment
                for (int i = 0; i < 50; i++) {
                    frontRightMotor.setPower(1);
                    backRightMotor.setPower(1);
                    frontLeftMotor.setPower(1);
                    backLeftMotor.setPower(1);
                }

                for (VuforiaTrackable beacon : beacons) {
                    // Add beacon to telemetry if visible
                    if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                        visibleTarget = beacon.getName();
                        telemetry.addData(visibleTarget, "Visible");
                    }

                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon
                            .getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                }

                // Move to the beacon until the beacon is in sight
                if (lastLocation != null) {
                    movingToFirstBeacon = false; // Only execute this once
                }
            }
        }

        while (liningUpWithFirstBeacon) {
            for (VuforiaTrackable beacon : beacons) {
                // Add beacon to telemetry if visible
                if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                    visibleTarget = beacon.getName();
                    telemetry.addData(visibleTarget, "Visible");
                }

                OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                        .getUpdatedRobotLocation();
                if (robotLocationTransform != null) {
                    lastLocation = robotLocationTransform;
                }
            }

            RobotMovement movement = processLocation(lastLocation, visibleTarget);
            if (movement.isNoMovement()) {
                liningUpWithFirstBeacon = false;
            }

            processMovement(movement);
        }

        if (movingToSecondBeacon) {
            // TODO Estimate the movements/distance from the first beacon to the second
            movingToSecondBeacon = false; // Only execute this once
        }

        if (vuforia.rgb != null && !isButtonHit) {
            Bitmap bmp = Bitmap.createBitmap(vuforia.rgb.getWidth(), vuforia.rgb.getHeight(),
                    Bitmap.Config.RGB_565);
            bmp.copyPixelsFromBuffer(vuforia.rgb.getPixels());

            img = new Mat();
            Utils.bitmapToMat(bmp, img);
        }

        for (VuforiaTrackable beacon : beacons) {
            // Add beacon to telemetry if visible
            if (((VuforiaTrackableDefaultListener) beacon.getListener()).isVisible()) {
                visibleTarget = beacon.getName();
                telemetry.addData(visibleTarget, "Visible");
            }

            OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener) beacon.getListener())
                    .getUpdatedRobotLocation();
            if (robotLocationTransform != null) {
                lastLocation = robotLocationTransform;
            }

            OpenGLMatrix pose = ((VuforiaTrackableDefaultListener) beacon.getListener()).getRawPose();

            if (pose != null) {
                Matrix34F rawPose = new Matrix34F();
                float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);
                rawPose.setData(poseData);

                // Corners of beacon image in camera image
                Vec2F upperLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, 92, 0));
                Vec2F upperRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, 92, 0));
                Vec2F lowerLeft = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(-127, -92, 0));
                Vec2F lowerRight = Tool.projectPoint(vuforia.getCameraCalibration(), rawPose,
                        new Vec3F(127, -92, 0));

                VectorF translation = pose.getTranslation();
                /* First argument is get(1) if the phone is vertical,
                   get(0) if the phone is horizontal */
                // DOES NOT WORK???
                degreesToTurn = Math.toDegrees(Math.atan2(translation.get(1), translation.get(2)));
                telemetry.addData("Degrees-", degreesToTurn);
                // TODO Check degree turning threshold
                if (Math.abs(degreesToTurn) > 15) {
                    // Turn after doing calculating transformations
                    needToTurn = true;
                }

                if (img != null && !isButtonHit) {
                    telemetry.addData(beacon.getName() + "-Translation", translation);
                    telemetry.addData(beacon.getName() + "-Degrees", degreesToTurn);

                    // Vectors are stored (y,x).  Coordinate system starts in top right
                    int height = (int) (lowerLeft.getData()[0] - upperLeft.getData()[0]);
                    int width = (int) (upperLeft.getData()[1] - upperRight.getData()[1]);

                    int rowStart = (int) upperRight.getData()[0] - height < 0 ? 0
                            : (int) upperRight.getData()[0] - height;
                    int rowEnd = rowStart + height > img.rows() ? img.rows() - 1 : rowStart + height;
                    int colStart = (int) upperRight.getData()[1] < 0 ? 0 : (int) upperRight.getData()[1];
                    int colEnd = colStart + width > img.cols() ? img.cols() - 1 : colStart + width;

                    telemetry.addData("Target Location", "");
                    telemetry.addData("[" + upperLeft.getData()[0] + "," + upperLeft.getData()[1] + "]",
                            "[" + upperRight.getData()[0] + "," + upperRight.getData()[1] + "]");
                    telemetry.addData("[" + lowerLeft.getData()[0] + "," + lowerLeft.getData()[1] + "]",
                            "[" + lowerRight.getData()[0] + "," + lowerRight.getData()[1] + "]");

                    telemetry.addData(colStart + "", rowStart);
                    telemetry.addData(colEnd + "", rowEnd);

                    // Crop the image to look only at the beacon
                    // TODO Verify beacon is in cropped image
                    // NEED TO CHECK BEACON HEIGHT FOR INCLUSION IN CROPPED IMAGE
                    croppedImg = img.submat(rowStart, rowEnd, colStart, colEnd);
                }
            }
        }

        // Process the rgb image
        if (croppedImg != null && !isButtonHit) {
            // Find the color of the beacon you need to hit
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point beaconToHitCenter = null;
            List<Moments> blueMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                blueMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = blueMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                beaconToHitCenter = new Point(x, y);
            }

            // Find the color of the beacon you are not hitting
            if (hitRed) {
                colorDetector.setHsvColor(new Scalar(25, 255, 185)); // Blue detector, needs verification with beacon
            } else {
                colorDetector.setHsvColor(new Scalar(180, 240, 240)); // Red detector, needs verification with beacon
            }
            colorDetector.process(croppedImg);

            // Calculate the center of the blob detected
            Point secondReferenceCenter = null;
            List<Moments> redMu = new ArrayList<>(colorDetector.getContours().size());
            for (int i = 0; i < colorDetector.getContours().size(); i++) {
                redMu.add(Imgproc.moments(colorDetector.getContours().get(i), false));
                Moments p = redMu.get(i);
                int x = (int) (p.get_m10() / p.get_m00());
                int y = (int) (p.get_m01() / p.get_m00());
                secondReferenceCenter = new Point(x, y);
            }

            // Use the two centers of the blobs to determine which direction to hit
            if (beaconToHitCenter != null && secondReferenceCenter != null && !isButtonHit && !needToTurn) {
                // (!isButtonHit) Only hit the button once
                // (!needToTurn) Do not hit the button if the robot is not straight centered
                //                    hitBeaconButton(isLeft(center, beaconImageCenter));
                if (isLeft(beaconToHitCenter, secondReferenceCenter)) {
                    if (!directionToHit.equals("Left")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Left";
                } else {
                    if (!directionToHit.equals("Right")) {
                        directionFoundInARow = 0;
                    }
                    directionFoundInARow++;
                    directionToHit = "Right";
                }
            }

            // Require the same direction three times in a row before hitting the button
            if (directionFoundInARow >= 3) {
                isButtonHit = true;
            }
        }

        if (isButtonHit) {
            telemetry.addData("Hit Button-", directionToHit);
        }

        //            if(needToTurn) {
        //                turn(degreesToTurn);
        //                telemetry.addData("Turn-", degreesToTurn);
        //            }

        /**
         * Provide feedback as to where the robot was last located (if we know).
         */
        if (lastLocation != null) {
            //  RobotLog.vv(TAG, "robot=%s", format(lastLocation));
            telemetry.addData("Pos", myFormat(lastLocation));

            if (!visibleTarget.equals("")) {
                telemetry.addData("Move", processLocation(lastLocation, visibleTarget));
            }
        } else {
            telemetry.addData("Pos", "Unknown");
        }

        telemetry.update();
        idle();
    }
}
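
The rows() and cols() calls in this example clamp a projected bounding box before submat() is called, which throws if the requested range leaves the image. Note that submat's end indices are exclusive, so clamping to img.rows() - 1 as above is safe but silently drops the last row. A standalone sketch of the pattern, with illustrative names:

// Sketch of the clamping pattern used above. submat's end indices are
// exclusive, so they may equal rows()/cols() but must not exceed them.
static Mat safeCrop(Mat img, int rowStart, int colStart, int height, int width) {
    int r0 = Math.max(rowStart, 0);
    int c0 = Math.max(colStart, 0);
    int r1 = Math.min(r0 + height, img.rows());
    int c1 = Math.min(c0 + width, img.cols());
    return img.submat(r0, r1, c0, c1);
}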

From source file:org.firstinspires.ftc.teamcode.VuforiaOp.java

public int getBeaconConfig(Image img, VuforiaTrackableDefaultListener beacon, CameraCalibration camCal) {

    OpenGLMatrix pose = beacon.getRawPose();

    if (pose != null && img != null && img.getPixels() != null) {

        Matrix34F rawPose = new Matrix34F();
        float[] poseData = Arrays.copyOfRange(pose.transposed().getData(), 0, 12);

        rawPose.setData(poseData);

        //calculating pixel coordinates of beacon corners
        float[][] corners = new float[4][2];

        corners[0] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 276, 0)).getData(); //upper left of beacon
        corners[1] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 276, 0)).getData(); //upper right of beacon
        corners[2] = Tool.projectPoint(camCal, rawPose, new Vec3F(127, 92, 0)).getData(); //lower right of beacon
        corners[3] = Tool.projectPoint(camCal, rawPose, new Vec3F(-127, 92, 0)).getData(); //lower left of beacon

        //getting camera image...
        Bitmap bm = Bitmap.createBitmap(img.getWidth(), img.getHeight(), Bitmap.Config.RGB_565);
        bm.copyPixelsFromBuffer(img.getPixels());

        //turning the corner pixel coordinates into a proper bounding box
        Mat crop = OCVUtils.bitmapToMat(bm, CvType.CV_8UC3);
        float x = Math.min(Math.min(corners[1][0], corners[3][0]), Math.min(corners[0][0], corners[2][0]));
        float y = Math.min(Math.min(corners[1][1], corners[3][1]), Math.min(corners[0][1], corners[2][1]));
        float width = Math.max(Math.abs(corners[0][0] - corners[2][0]),
                Math.abs(corners[1][0] - corners[3][0]));
        float height = Math.max(Math.abs(corners[0][1] - corners[2][1]),
                Math.abs(corners[1][1] - corners[3][1]));

        //make sure our bounding box doesn't go outside of the image
        //OpenCV doesn't like that...
        x = Math.max(x, 0);
        y = Math.max(y, 0);
        width = (x + width > crop.cols()) ? crop.cols() - x : width;
        height = (y + height > crop.rows()) ? crop.rows() - y : height;

        //cropping bounding box out of camera image
        final Mat cropped = new Mat(crop, new Rect((int) x, (int) y, (int) width, (int) height));

        Bitmap pic = OCVUtils.matToBitmap(cropped);
        //filtering out non-beacon-blue colours in HSV colour space
        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_RGB2HSV_FULL);

        //get filtered mask
        //if pixel is within acceptable blue-beacon-colour range, it's changed to white.
        //Otherwise, it's turned to black
        Mat mask = new Mat();

        Core.inRange(cropped, BEACON_BLUE_LOW, BEACON_BLUE_HIGH, mask);
        Moments mmnts = Imgproc.moments(mask, true);

        //calculating centroid of the resulting binary mask via image moments
        Log.i("CentroidX", "" + ((mmnts.get_m10() / mmnts.get_m00())));
        Log.i("CentroidY", "" + ((mmnts.get_m01() / mmnts.get_m00())));

        //checking if blue either takes up the majority of the image (which means the beacon is all blue)
        //or if there's barely any blue in the image (which means the beacon is all red or off)
        //            if (mmnts.get_m00() / mask.total() > 0.8) {
        //                return VortexUtils.BEACON_ALL_BLUE;
        //            } else if (mmnts.get_m00() / mask.total() < 0.1) {
        //                return VortexUtils.BEACON_NO_BLUE;
        //            }//elseif

        //Note: for some reason, we end up with an image that is rotated 90 degrees
        //if centroid is in the bottom half of the image, the blue beacon is on the left
        //if the centroid is in the top half, the blue beacon is on the right
        if ((mmnts.get_m01() / mmnts.get_m00()) < cropped.rows() / 2) {
            return VortexUtils.BEACON_RED_BLUE;
        } else {
            return VortexUtils.BEACON_BLUE_RED;
        } //else
    } //if

    return VortexUtils.NOT_VISIBLE;
}
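
The decisive step above is comparing the mask centroid's row against cropped.rows() / 2. A condensed sketch of that moments-based test follows; croppedHsv is an illustrative name, and the Scalar bounds are assumed stand-ins for BEACON_BLUE_LOW/BEACON_BLUE_HIGH, which are defined outside this file:

// Condensed sketch of the centroid test above. The Scalar bounds are
// placeholders; the original constants are not shown in this file.
Mat mask = new Mat();
Core.inRange(croppedHsv, new Scalar(150, 100, 100), new Scalar(200, 255, 255), mask);
Moments m = Imgproc.moments(mask, true);
double centroidRow = m.get_m01() / m.get_m00(); // undefined if the mask is empty (m00 == 0)
// The frame arrives rotated 90 degrees, so the row position encodes left/right:
boolean blueOnLeft = centroidRow >= croppedHsv.rows() / 2.0;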

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public void executePreprocessor() {
    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    Mat mat = originalMat.createGrayImage();
    showImageResult(mat, "Original image");
    long blackCount = 0;
    long whiteCount = 0;

    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }
    int index = 0;
    for (int i = 0; i < mat.rows(); i += 16) {
        for (int j = 0; j < mat.cols(); j += 8) {
            // Point takes (x, y), i.e. the column first, then the row
            Rect rect = new Rect(new Point(j, i), new Size(8, 16));
            index++;
            System.out.println(rect.toString());
        }
    }
    System.out.println(index);
    ThresholdMat thresholdMat = new ThresholdMat(mat, 0, 255, Imgproc.THRESH_OTSU);
    Mat mat1 = thresholdMat.createMatResult();
    if (blackCount > whiteCount) {
        showImageResult(mat1, "Thresholded image");

        CloseMat openMat = new CloseMat(mat1, Imgproc.MORPH_RECT, 5, 5, 1);
        Mat mat2 = openMat.createMatResult();
        showImageResult(mat2, "Result of the open operation");
    }
}
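
The nested get() loop above crosses the JNI boundary once per pixel, which is slow from Java. Here is a sketch of the same black/white census using whole-Mat operations, with the thresholds 230 and 35 taken from the loop (note THRESH_BINARY_INV counts values <= 35 where the loop tested < 35):

// Same census as the nested loop above, done with whole-Mat operations.
Mat whiteMask = new Mat();
Mat blackMask = new Mat();
Imgproc.threshold(mat, whiteMask, 230, 255, Imgproc.THRESH_BINARY);     // pixels > 230
Imgproc.threshold(mat, blackMask, 35, 255, Imgproc.THRESH_BINARY_INV);  // pixels <= 35
int whiteCount = Core.countNonZero(whiteMask);
int blackCount = Core.countNonZero(blackMask);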

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public void licensePlateDetection() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    long blackCount = 0;
    long whiteCount = 0;
    for (int i = 0; i < mat.rows(); i++) {
        for (int j = 0; j < mat.cols(); j++) {
            double temp[] = mat.get(i, j);
            if (temp[0] > 230)
                whiteCount++;
            else if (temp[0] < 35)
                blackCount++;
        }
    }
    System.out.println("whiteCount: " + whiteCount);
    System.out.println("blackCount: " + blackCount);
    // ImageResult imageResult1 = new ImageResult(mat,
    // "GrayImage1");
    // imageResult1.showResultImage();

    // Step 2
    BilateralFilteringMat bilateralFilteringMat = new BilateralFilteringMat(mat, 75, 75, 1);
    Mat mat2 = bilateralFilteringMat.createMatResult();

    // ImageResult imageResult2 = new ImageResult(mat2,
    // "Gauss");
    // imageResult2.showResultImage();
    // Step 3
    HistogramEqualizationMat histogramEqualizationMat = new HistogramEqualizationMat(mat2);

    Mat mat3 = histogramEqualizationMat.createMatResult();
    // ImageResult imageResult3 = new ImageResult(mat3,
    // "HistogramEqualizationMat");
    // imageResult3.showResultImage();

    // Step 4
    OpenMat openMat = new OpenMat(mat3, Imgproc.MORPH_RECT, 5, 5, 2.2);

    Mat mat4 = openMat.createMatResult();
    // ImageResult imageResult4 = new ImageResult(mat4,
    // "OpenMat");
    // imageResult4.showResultImage();

    // Step 5
    SubtractMat subtractMat = new SubtractMat(mat4, mat3);

    Mat mat5 = subtractMat.createMatResult();
    // ImageResult imageResult5 = new ImageResult(mat5,
    // "SubtractMat");
    // imageResult5.showResultImage();
    //
    // Step 6
    ThresholdMat thresholdMat = new ThresholdMat(mat5, 0, 255, Imgproc.THRESH_OTSU);

    // Mat mat6 = thresholdMat.createMatResult();
    // ImageResult imageResult6 = new ImageResult(mat6,
    // "THRESH_OTSU");
    // imageResult6.showResultImage();

    // //Step 7
    // CannyMat cannyMat = new CannyMat(mat6, 250,
    // 255);
    //
    // Mat mat7 = cannyMat.createMatResult();
    // ImageResult imageResult7 = new ImageResult(mat7,
    // "GrayImage7");
    // imageResult7.showResultImage();
    //
    // //Step 8
    // MorphologyMatBase morphologyMatBase = new MorphologyMatBase(
    // mat7, Imgproc.MORPH_RECT, 3, 3, 1);
    // Mat mat8 = morphologyMatBase.dilate();
    // ImageResult imageResult8 = new ImageResult(mat8,
    // "GrayImage8");
    // imageResult8.showResultImage();
    //
    // //Step 9
    // RectangleDetection rect = new RectangleDetection(mat8);
    // ImageResult imageResult = new
    // ImageResult(rect.executeRectangleDetection(),
    // "GrayImage9");
    // imageResult.showResultImage();

}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

/**
 * I'm working here.
 *
 * @return the segmented character images cropped from the detected plate
 */
public List<Mat> processImagePointBlackBiggerThanPointWhite() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    ThresholdMat thresholdMat = new ThresholdMat(mat.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat threshold = thresholdMat.createMatResult();

    MorphologyMatBase closeMat = new MorphologyMatBase(threshold, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat close = closeMat.dilate();

    RectangleDetection rectangleDetection = new RectangleDetection(close);

    List<MatOfPoint> contoursDetectPlate = rectangleDetection.executeRectangleDetection();

    SubMat subMatDetectPlate = new SubMat(mat, contoursDetectPlate);

    // Get the detected plate candidates (a hard-coded index picks one of them)
    List<Mat> detectPlates = subMatDetectPlate.dropImage();
    Mat matDetectPlate = detectPlates.get(5);

    ImageResult imageResult = new ImageResult(matDetectPlate, "Result ");
    imageResult.showResultImage();

    // pre-process (Mat's constructor takes (rows, cols); note that
    // Imgproc.resize reallocates the destination to the given Size anyway)
    Mat matResult = new Mat(matDetectPlate.rows() * 2, matDetectPlate.cols() * 2, matDetectPlate.type());
    Imgproc.resize(matDetectPlate, matResult, new Size(matDetectPlate.cols(), matDetectPlate.rows()));

    ThresholdMat thresholdMatDetectPlate = new ThresholdMat(matResult, 0, 255, Imgproc.THRESH_OTSU);

    Mat thresholdMatDetectPlateMat = thresholdMatDetectPlate.createMatResult();

    ImageResult imageResults = new ImageResult(thresholdMatDetectPlateMat, "Result ");
    imageResults.showResultImage();

    CharacterSegment characterSegment = new CharacterSegment(thresholdMatDetectPlateMat.clone());

    List<MatOfPoint> contoursNumber = characterSegment.executeCharacterSegment();

    System.out.println(contoursNumber.size());
    SubMat subMatNumberImg = new SubMat(thresholdMatDetectPlateMat.clone(), contoursNumber);

    List<Mat> listNumberImg = subMatNumberImg.dropImage();

    return listNumberImg;
}

From source file:org.it.tdt.edu.vn.platedetection.process.LicensePlateDetection.java

License:Open Source License

public Map<String, List<Mat>> processImagePointBlackBiggerThanPointWhiteTest() {

    OriginalImage originalImage = new OriginalImage(imgUrl);
    BufferedImage bufferedImage = originalImage.getImageFromResourcesDirectory();
    OriginalMat originalMat = new OriginalMat(bufferedImage);

    // Step 1
    Mat mat = originalMat.createGrayImage();

    ThresholdMat thresholdMat = new ThresholdMat(mat.clone(), 0, 255, Imgproc.THRESH_OTSU);
    Mat threshold = thresholdMat.createMatResult();

    MorphologyMatBase closeMat = new MorphologyMatBase(threshold, Imgproc.MORPH_RECT, 1, 1, 1);
    Mat close = closeMat.dilate();

    RectangleDetection rectangleDetection = new RectangleDetection(close);

    List<MatOfPoint> contoursDetectPlate = rectangleDetection.executeRectangleDetection();

    SubMat subMatDetectPlate = new SubMat(mat, contoursDetectPlate);

    // Get plate detected
    List<Mat> detectPlates = subMatDetectPlate.dropImage();
    Map<String, List<Mat>> mapNumberImg = new HashMap<String, List<Mat>>();

    for (int i = 0; i < detectPlates.size(); i++) {
        // pre-process
        Mat matDetectPlate = detectPlates.get(i);
        // Mat's constructor takes (rows, cols); resize reallocates matResult anyway
        Mat matResult = new Mat(matDetectPlate.rows() * 2, matDetectPlate.cols() * 2, matDetectPlate.type());
        Imgproc.resize(matDetectPlate, matResult,
                new Size(matDetectPlate.cols(), matDetectPlate.rows()));

        ThresholdMat thresholdMatDetectPlate = new ThresholdMat(matResult, 0, 255, Imgproc.THRESH_OTSU);

        Mat thresholdMatDetectPlateMat = thresholdMatDetectPlate.createMatResult();

        CharacterSegment characterSegment = new CharacterSegment(thresholdMatDetectPlateMat.clone());

        List<MatOfPoint> contoursNumber = characterSegment.executeCharacterSegment();

        SubMat subMatNumberImg = new SubMat(thresholdMatDetectPlateMat.clone(), contoursNumber);

        mapNumberImg.put(String.valueOf(i), subMatNumberImg.dropImage());
    }
    return mapNumberImg;
}

From source file:org.lasarobotics.vision.detection.objects.Detectable.java

License:Open Source License

/**
 * Gets the average color of the object.
 *
 * @param img      The image matrix, of any size and color depth
 * @param imgSpace The image's color space
 * @return The average color of the region
 */
public Color averageColor(Mat img, ColorSpace imgSpace) {
    //Coerce values to stay within screen dimensions
    double leftX = MathUtil.coerce(0, img.cols() - 1, left());
    double rightX = MathUtil.coerce(0, img.cols() - 1, right());

    double topY = MathUtil.coerce(0, img.rows() - 1, top());
    double bottomY = MathUtil.coerce(0, img.rows() - 1, bottom());

    //Input points into array for calculation
    //TODO rectangular submatrix-based calculation isn't perfectly accurate when you have ellipses or weird shapes
    Mat subMat = img.submat((int) topY, (int) bottomY, (int) leftX, (int) rightX);

    //Calculate average and return new color instance
    return Color.create(Core.mean(subMat), imgSpace);
}
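
A hedged usage sketch: detectable stands for any concrete Detectable implementation, rgbFrame for a camera frame, and ColorSpace.RGB is assumed to be one of this library's color-space constants.

// Hypothetical usage: sample the region's average colour from an RGB frame.
Color average = detectable.averageColor(rgbFrame, ColorSpace.RGB);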

From source file:org.lasarobotics.vision.util.color.Color.java

License:Open Source License

/**
 * Rapidly convert an RGBA matrix to a Grayscale matrix, bypassing
 * most of the color conversion overhead.
 *
 * @param rgba RGBA matrix
 * @return Grayscale matrix
 */
public static Mat rapidConvertRGBAToGRAY(Mat rgba) {
    Mat gray = new Mat(rgba.rows(), rgba.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);
    return gray;
}
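
Pre-sizing the destination from rows()/cols() lets cvtColor write into it without reallocating. A hypothetical usage sketch with a placeholder frame:

// Hypothetical usage: convert a frame before grayscale-only analysis.
Mat rgbaFrame = new Mat(480, 640, CvType.CV_8UC4); // placeholder 640x480 frame
Mat gray = Color.rapidConvertRGBAToGRAY(rgbaFrame);
// gray.rows() == rgbaFrame.rows() and gray.cols() == rgbaFrame.cols()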

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

/**
 * Attempt to find matches of the given template within the current camera
 * frame. Matches are returned as TemplateMatch objects which contain
 * a Location in Camera coordinates. The results are sorted best score
 * to worst score.
 * @param template the template image to search for
 * @return the matches, sorted from best score to worst
 */
public List<TemplateMatch> getTemplateMatches(BufferedImage template) {
    // TODO: ROI
    BufferedImage image = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    template = OpenCvUtils.convertBufferedImage(template, BufferedImage.TYPE_BYTE_GRAY);
    image = OpenCvUtils.convertBufferedImage(image, BufferedImage.TYPE_BYTE_GRAY);

    Mat templateMat = OpenCvUtils.toMat(template);
    Mat imageMat = OpenCvUtils.toMat(image);
    Mat resultMat = new Mat();

    Imgproc.matchTemplate(imageMat, templateMat, resultMat, Imgproc.TM_CCOEFF_NORMED);

    Mat debugMat = null;
    if (logger.isDebugEnabled()) {
        debugMat = imageMat.clone();
    }

    MinMaxLocResult mmr = Core.minMaxLoc(resultMat);
    double maxVal = mmr.maxVal;

    // TODO: Externalize?
    double threshold = 0.7f;
    double corr = 0.85f;

    double rangeMin = Math.max(threshold, corr * maxVal);
    double rangeMax = maxVal;

    List<TemplateMatch> matches = new ArrayList<TemplateMatch>();
    for (Point point : matMaxima(resultMat, rangeMin, rangeMax)) {
        TemplateMatch match = new TemplateMatch();
        int x = point.x;
        int y = point.y;
        match.score = resultMat.get(y, x)[0] / maxVal;

        if (logger.isDebugEnabled()) {
            Core.rectangle(debugMat, new org.opencv.core.Point(x, y),
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()), new Scalar(255));
            Core.putText(debugMat, "" + match.score,
                    new org.opencv.core.Point(x + templateMat.cols(), y + templateMat.rows()),
                    Core.FONT_HERSHEY_PLAIN, 1.0, new Scalar(255));
        }

        Location offsets = getPixelCenterOffsets(x + (templateMat.cols() / 2), y + (templateMat.rows() / 2));
        match.location = camera.getLocation().subtract(offsets);
        matches.add(match);
    }

    Collections.sort(matches, new Comparator<TemplateMatch>() {
        @Override
        public int compare(TemplateMatch o1, TemplateMatch o2) {
            return ((Double) o2.score).compareTo(o1.score);
        }
    });

    saveDebugImage("template", templateMat);
    saveDebugImage("camera", imageMat);
    saveDebugImage("result", resultMat);
    saveDebugImage("debug", debugMat);

    return matches;
}
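
matchTemplate shrinks the result: resultMat has imageMat.rows() - templateMat.rows() + 1 rows (and likewise for columns), one score per template placement. An illustrative sketch of recovering a match's bounding box from the best-scoring placement:

// Each (y, x) in resultMat scores the placement whose top-left corner is
// (x, y) in the image, so a match's bounding box is recovered as:
org.opencv.core.Point peak = Core.minMaxLoc(resultMat).maxLoc;
Rect box = new Rect((int) peak.x, (int) peak.y, templateMat.cols(), templateMat.rows());
// box always fits inside imageMat because of resultMat's reduced size.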

From source file:org.openpnp.machine.reference.vision.OpenCvVisionProvider.java

License:Open Source License

@Override
public Point[] locateTemplateMatches(int roiX, int roiY, int roiWidth, int roiHeight, int coiX, int coiY,
        BufferedImage templateImage_) throws Exception {
    BufferedImage cameraImage_ = camera.capture();

    // Convert the camera image and template image to the same type. This
    // is required by the cvMatchTemplate call.
    templateImage_ = OpenCvUtils.convertBufferedImage(templateImage_, BufferedImage.TYPE_INT_ARGB);
    cameraImage_ = OpenCvUtils.convertBufferedImage(cameraImage_, BufferedImage.TYPE_INT_ARGB);

    Mat templateImage = OpenCvUtils.toMat(templateImage_);
    Mat cameraImage = OpenCvUtils.toMat(cameraImage_);
    Mat roiImage = new Mat(cameraImage, new Rect(roiX, roiY, roiWidth, roiHeight));

    // http://stackoverflow.com/questions/17001083/opencv-template-matching-example-in-android
    // Mat's constructor takes (rows, cols); matchTemplate reallocates the
    // result to (rows - templateRows + 1) x (cols - templateCols + 1) regardless.
    Mat resultImage = new Mat(roiImage.rows() - templateImage.rows() + 1,
            roiImage.cols() - templateImage.cols() + 1, CvType.CV_32FC1);
    Imgproc.matchTemplate(roiImage, templateImage, resultImage, Imgproc.TM_CCOEFF);

    MinMaxLocResult mmr = Core.minMaxLoc(resultImage);

    org.opencv.core.Point matchLoc = mmr.maxLoc;
    double matchValue = mmr.maxVal;

    // TODO: Figure out certainty and how to filter on it.

    logger.debug(
            String.format("locateTemplateMatches certainty %f at %f, %f", matchValue, matchLoc.x, matchLoc.y));
    locateTemplateMatchesDebug(roiImage, templateImage, matchLoc);

    return new Point[] { new Point(((int) matchLoc.x) + roiX, ((int) matchLoc.y) + roiY) };
}