Example usage for org.opencv.core Mat convertTo

Introduction

This page collects usage examples for org.opencv.core.Mat.convertTo.

Prototype

public void convertTo(Mat m, int rtype) 
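
convertTo converts a matrix to another data type, rounding and saturating any value that falls outside the target type's range. OpenCV also provides overloads that apply a scale factor and offset during the conversion, convertTo(m, rtype, alpha, beta). A minimal sketch, with illustrative values:

Mat src = new Mat(2, 2, CvType.CV_32F, new Scalar(300.0));
Mat dst = new Mat();
src.convertTo(dst, CvType.CV_8U); // 300.0 is out of range for CV_8U and saturates to 255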

Usage

From source file:OCV_ConnectedComponentsWithStats.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    // src
    int imw = ip.getWidth();
    int imh = ip.getHeight();
    byte[] src_arr = (byte[]) ip.getPixels();
    Mat src_mat = new Mat(imh, imw, CvType.CV_8UC1);

    // dst
    String titleDst = WindowManager
            .getUniqueName(impSrc.getTitle() + "_Connect" + String.valueOf(TYPE_INT[type_ind]));
    ImagePlus impDst = new ImagePlus(titleDst, new FloatProcessor(imw, imh));
    float[] dst_arr = (float[]) impDst.getChannelProcessor().getPixels();
    Mat dst_mat_32s = new Mat(imh, imw, CvType.CV_32S);
    Mat dst_mat_32f = new Mat(imh, imw, CvType.CV_32F);
    Mat stats_mat = new Mat();
    Mat cens_mat = new Mat();

    // run
    src_mat.put(0, 0, src_arr);
    int output_con = Imgproc.connectedComponentsWithStats(src_mat, dst_mat_32s, stats_mat, cens_mat,
            TYPE_INT[type_ind], CvType.CV_32S);
    dst_mat_32s.convertTo(dst_mat_32f, CvType.CV_32F);
    dst_mat_32f.get(0, 0, dst_arr);

    // show data
    if (1 < output_con) {
        showData(dst_arr, imw, imh, output_con, stats_mat, cens_mat);
    }

    // finish
    if (1 < output_con && enOutImg) {
        impDst.show();
    } else {
        impDst.close();
    }
}
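
Imgproc.connectedComponentsWithStats writes its label map as 32-bit integers (CV_32S), while ImageJ's FloatProcessor stores 32-bit floats; the convertTo call bridges the two so the labels can be copied straight into the float pixel array with get.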

From source file:OCV_DistanceTransform.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    // srcdst
    int imw = ip.getWidth();
    int imh = ip.getHeight();
    float[] srcdst_floats = (float[]) ip.getPixels();

    // mat
    Mat src_mat_32f = new Mat(imh, imw, CvType.CV_32FC1);
    Mat src_mat_8u = new Mat(imh, imw, CvType.CV_8UC1);
    Mat dst_mat_32f = new Mat(imh, imw, CvType.CV_32FC1);

    // run
    src_mat_32f.put(0, 0, srcdst_floats);
    src_mat_32f.convertTo(src_mat_8u, CvType.CV_8UC1);
    Imgproc.distanceTransform(src_mat_8u, dst_mat_32f, INT_DISTANCETYPE[indDistType],
            INT_DISTANCETRANSFORMMASKS[indMskSize]);
    dst_mat_32f.get(0, 0, srcdst_floats);
}
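
Note that narrowing from CV_32FC1 to CV_8UC1 rounds and saturates: values below 0 clamp to 0 and values above 255 clamp to 255, which yields the 8-bit single-channel input that Imgproc.distanceTransform requires.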

From source file:OCV_Watershed.java

License:Open Source License

@Override
public void run(ImageProcessor ip) {
    // src (RGB)
    int[] arr_src_rgb = (int[]) imp_src.getChannelProcessor().getPixels();
    int imw_src = imp_src.getWidth();
    int imh_src = imp_src.getHeight();
    Mat mat_src_rgb = new Mat(imh_src, imw_src, CvType.CV_8UC3);

    // map (32bit)
    float[] arr_map_32f = (float[]) imp_map.getChannelProcessor().getPixels();
    int imw_map = imp_map.getWidth();
    int imh_map = imp_map.getHeight();
    Mat mat_map_32f = new Mat(imh_map, imw_map, CvType.CV_32FC1);
    Mat mat_map_32s = new Mat(imh_map, imw_map, CvType.CV_32SC1);

    // run
    OCV__LoadLibrary.intarray2mat(arr_src_rgb, mat_src_rgb, imw_src, imh_src);
    mat_map_32f.put(0, 0, arr_map_32f);
    mat_map_32f.convertTo(mat_map_32s, CvType.CV_32SC1);

    Imgproc.watershed(mat_src_rgb, mat_map_32s);

    mat_map_32s.convertTo(mat_map_32f, CvType.CV_32FC1);
    mat_map_32f.get(0, 0, arr_map_32f);
}
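
Imgproc.watershed expects an 8-bit 3-channel source and a CV_32SC1 marker map, so the float marker image is converted to 32-bit integers before the call and back to floats afterwards for display in ImageJ.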

From source file:ch.zhaw.facerecognitionlibrary.Helpers.MatOperation.java

License:Open Source License

public static Rect[] rotateFaces(Mat img, Rect[] faces, int angle) {
    Point center = new Point(img.cols() / 2.0, img.rows() / 2.0); // floating-point division keeps the centre exact for odd dimensions
    Mat rotMat = Imgproc.getRotationMatrix2D(center, angle, 1);
    rotMat.convertTo(rotMat, CvType.CV_32FC1);
    float scale = (float) img.cols() / img.rows(); // cast avoids integer division
    for (Rect face : faces) {
        Mat m = new Mat(3, 1, CvType.CV_32FC1);
        m.put(0, 0, face.x);
        m.put(1, 0, face.y);
        m.put(2, 0, 1);
        Mat res = Mat.zeros(2, 1, CvType.CV_32FC1);
        Core.gemm(rotMat, m, 1, new Mat(), 0, res, 0);
        face.x = (int) res.get(0, 0)[0];
        face.y = (int) res.get(1, 0)[0];
        if (angle == 270 || angle == -90) {
            face.x = (int) (face.x * scale - face.width);
            face.x = face.x + face.width / 4;
            face.y = face.y + face.height / 4;
        } else if (angle == 180 || angle == -180) {
            face.x = face.x - face.width;
            face.y = face.y - face.height;
        } else if (angle == 90 || angle == -270) {
            face.y = (int) (face.y * scale - face.height);
            face.x = face.x - face.width / 4;
            face.y = face.y - face.height / 4;
        }
    }
    return faces;
}
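
Imgproc.getRotationMatrix2D returns a CV_64F matrix, so it is converted to CV_32FC1 to match the coordinate vector m; Core.gemm requires both operands to share the same floating-point type.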

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.BrightnessCorrection.GammaCorrection.java

License:Open Source License

public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        img.convertTo(img, CvType.CV_32F);
        Core.divide(img, INT_MAX, img);
        Core.pow(img, gamma, img);
        Core.multiply(img, INT_MAX, img);
        img.convertTo(img, CvType.CV_8U);
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}
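
Because convertTo also accepts a scale factor, the divide and multiply steps above can be folded into the conversions themselves. A sketch of the equivalent pipeline, assuming INT_MAX above is a Scalar holding 255 and gamma is the same exponent:

img.convertTo(img, CvType.CV_32F, 1.0 / 255.0); // normalize to [0, 1] while converting to float
Core.pow(img, gamma, img);                      // apply the gamma curve
img.convertTo(img, CvType.CV_8U, 255.0);        // rescale to [0, 255]; results saturate to 8 bits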

From source file:ch.zhaw.facerecognitionlibrary.PreProcessor.ContrastAdjustment.HistogrammEqualization.java

License:Open Source License

public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images) {
        img.convertTo(img, CvType.CV_8U);
        Imgproc.equalizeHist(img, img);
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}
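
The conversion to CV_8U here is required because Imgproc.equalizeHist only accepts 8-bit single-channel images.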

From source file:ch.zhaw.facerecognitionlibrary.Recognition.Eigenfaces.java

License:Open Source License

public String recognize(Mat img, String expectedLabel) {
    // Ignore
    img = img.reshape(1, 1);
    // Subtract mean
    img.convertTo(img, CvType.CV_32F);
    Core.subtract(img, Psi, img);
    // Project to subspace
    Mat projected = getFeatureVector(img);
    // Save all points of image for tSNE
    img.convertTo(img, CvType.CV_8U);
    addImage(projected, expectedLabel, true);
    //addImage(projected, expectedLabel);
    Mat distance = new Mat(Omega.rows(), 1, CvType.CV_64FC1);
    for (int i = 0; i < Omega.rows(); i++) {
        double dist = Core.norm(projected.row(0), Omega.row(i), Core.NORM_L2);
        distance.put(i, 0, dist);
    }
    Mat sortedDist = new Mat(Omega.rows(), 1, CvType.CV_32SC1); // Core.sortIdx outputs 32-bit integer indices
    Core.sortIdx(distance, sortedDist, Core.SORT_EVERY_COLUMN + Core.SORT_ASCENDING);
    // Give back the name of the found person
    int index = (int) (sortedDist.get(0, 0)[0]);
    return labelMap.getKey(labelList.get(index));
}
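
The image is promoted to CV_32F so the mean vector Psi can be subtracted without truncation; once the projection has been computed, it is demoted back to CV_8U.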

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private Mat applyRayCastingSegmentation() {
    //Mat cannyEdges = CoreOperations.cannyFilter(sourceImage, 26, 58);
    Mat contours = new Mat(preparedImage.rows(), preparedImage.cols(), CvType.CV_32S);
    int contoursCount = /*neurons.size();*/ CoreOperations
            .drawAllContours(CoreOperations.erode(preparedImage, 5), contours);
    Mat result = new Mat(preparedImage.rows(), preparedImage.cols(), preparedImage.type());//CoreOperations.or(CoreOperations.and(cannyEdges, CoreOperations.grayscale(preparedImage)), contours);
    //cannyEdges.release();

    //Mat markers = new Mat(contours.rows(), contours.cols(), CvType.CV_32S);
    //contours.copyTo(markers);
    contours.convertTo(contours, CvType.CV_32S);

    for (Neuron neuron : neurons) {
        int x = (int) neuron.getCenter().x;
        int y = (int) neuron.getCenter().y;
        int color = (int) preparedImage.get(y, x)[0];
        /*contours.put(y, x, color);
        contours.put(y - 2, x, color);
        contours.put(y + 2, x, color);
        contours.put(y, x - 2, color);
        contours.put(y, x + 2, color);*/
        Imgproc.circle(contours, neuron.getCenter(), (int) (0.4f * neuron.getRadius()), new Scalar(color), -1);
    }

    Imgproc.watershed(sourceImage, contours);

    for (int i = 0; i < contours.rows(); i++) {
        for (int j = 0; j < contours.cols(); j++) {
            int index = (int) contours.get(i, j)[0];
            if (index == -1) {
                result.put(i, j, 0, 0, 0);
            } else if (index <= 0 || index > contoursCount) {
                result.put(i, j, 0, 0, 0);
            } else {
                if (index == 255) {
                    result.put(i, j, 0, 0, 0/*sourceImage.get(i, j)*/);
                } else {
                    result.put(i, j, index, index, index);
                }
            }
        }
    }

    result = CoreOperations.erode(result, 2);
    result = CoreOperations.dilate(result, 3);

    contours.release();

    contours = sourceImage.clone();
    CoreOperations.drawAllContours(result, contours);

    return contours;
}

From source file:com.oetermann.imageclassifier.MatchFinderWrapper.java

License:Open Source License

public int bestMatch(Mat queryDescriptors, int minMatches) {
    queryDescriptors.convertTo(queryDescriptors, CvType.CV_32F);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(queryDescriptors, matches);
    queryDescriptors.empty(); // no-op read intended to keep the Mat reachable so the GC does not release it mid-match
    Arrays.fill(matchesPerImage, 0);
    DMatch[] matchesArray = matches.toArray();
    for (DMatch match : matchesArray) {
        //            match.distance;
        if (match.distance > 1) {
            match.distance = match.distance / 1000;
        }
        if (match.distance < 1) {
            matchesPerImage[match.imgIdx] += 1 - match.distance;
        }
        //            matchesPerImage[match.imgIdx] += 1;
        //            System.out.println("MatchDistance: "+match.distance + "\t\tImage: "+ imageNames[match.imgIdx]);
    }
    int index = 0;
    for (int i = 0; i < matchesPerImage.length; i++) {
        //            System.out.println(matchesPerImage[i] + "\t\tmatches for image " + imageNames[i]);
        if (matchesPerImage[i] > matchesPerImage[index]) {
            index = i;
        }
    }
    //        System.out.println("Total Matches: "+matches.size());
    if (matchesPerImage[index] >= minMatches) {
        return index;
    }
    return -1;
}
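
The initial convertTo matters because FLANN-based matchers only operate on CV_32F descriptors; binary descriptors (for example ORB's CV_8U output) must either be converted like this or matched with a Hamming-distance BFMatcher instead.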

From source file:com.seleniumtests.util.imaging.ImageDetector.java

License:Apache License

/**
 * Computes the rectangle where the searched picture is located and the rotation angle between the two images.
 * Throws {@link ImageSearchException} if the picture is not found.
 * @deprecated Kept here for reference, but OpenCV 3 no longer includes SURF in its Java build
 */
public void detectCorrespondingZone() {
    Mat objectImageMat = Imgcodecs.imread(objectImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat sceneImageMat = Imgcodecs.imread(sceneImage.getAbsolutePath(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.SURF);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();

    surf.detect(objectImageMat, objectKeyPoints);
    surf.detect(sceneImageMat, sceneKeyPoints);

    DescriptorExtractor surfExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    Mat objectDescriptor = new Mat();
    Mat sceneDescriptor = new Mat();
    surfExtractor.compute(objectImageMat, objectKeyPoints, objectDescriptor);
    surfExtractor.compute(sceneImageMat, sceneKeyPoints, sceneDescriptor);

    try {
        Mat outImage = new Mat();
        Features2d.drawKeypoints(objectImageMat, objectKeyPoints, outImage);
        String tempFile = File.createTempFile("img", ".png").getAbsolutePath();
        writeComparisonPictureToFile(tempFile, outImage);
    } catch (IOException e) {
        // writing the debug keypoint snapshot is best-effort; ignore failures
    }

    // http://stackoverflow.com/questions/29828849/flann-for-opencv-java
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    MatOfDMatch matches = new MatOfDMatch();

    if (objectKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException("No keypoints in object to search, check it's not uniformly coloured: "
                + objectImage.getAbsolutePath());
    }
    if (sceneKeyPoints.toList().isEmpty()) {
        throw new ImageSearchException(
                "No keypoints in scene, check it's not uniformly coloured: " + sceneImage.getAbsolutePath());
    }
    if (objectDescriptor.type() != CvType.CV_32F) {
        objectDescriptor.convertTo(objectDescriptor, CvType.CV_32F);
    }
    if (sceneDescriptor.type() != CvType.CV_32F) {
        sceneDescriptor.convertTo(sceneDescriptor, CvType.CV_32F);
    }

    matcher.match(objectDescriptor, sceneDescriptor, matches);

    double maxDist = 0;
    double minDist = 10000;

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        double dist = matches.toList().get(i).distance;
        if (dist < minDist) {
            minDist = dist;
        }
        if (dist > maxDist) {
            maxDist = dist;
        }
    }

    logger.debug("-- Max dist : " + maxDist);
    logger.debug("-- Min dist : " + minDist);

    LinkedList<DMatch> goodMatches = new LinkedList<>();
    MatOfDMatch gm = new MatOfDMatch();

    for (int i = 0; i < objectDescriptor.rows(); i++) {
        if (matches.toList().get(i).distance < detectionThreshold) {
            goodMatches.addLast(matches.toList().get(i));
        }
    }
    gm.fromList(goodMatches);

    Features2d.drawMatches(objectImageMat, objectKeyPoints, sceneImageMat, sceneKeyPoints, gm, imgMatch,
            Scalar.all(-1), Scalar.all(-1), new MatOfByte(), Features2d.NOT_DRAW_SINGLE_POINTS);

    if (goodMatches.isEmpty()) {
        throw new ImageSearchException("Cannot find matching zone");
    }

    LinkedList<Point> objList = new LinkedList<>();
    LinkedList<Point> sceneList = new LinkedList<>();

    List<KeyPoint> objectKeyPointsList = objectKeyPoints.toList();
    List<KeyPoint> sceneKeyPointsList = sceneKeyPoints.toList();

    for (int i = 0; i < goodMatches.size(); i++) {
        objList.addLast(objectKeyPointsList.get(goodMatches.get(i).queryIdx).pt);
        sceneList.addLast(sceneKeyPointsList.get(goodMatches.get(i).trainIdx).pt);
    }

    MatOfPoint2f obj = new MatOfPoint2f();
    obj.fromList(objList);

    MatOfPoint2f scene = new MatOfPoint2f();
    scene.fromList(sceneList);

    // Calib3d.RANSAC could be used instead of 0
    Mat hg = Calib3d.findHomography(obj, scene, 0, 5);

    Mat objectCorners = new Mat(4, 1, CvType.CV_32FC2);
    Mat sceneCorners = new Mat(4, 1, CvType.CV_32FC2);

    objectCorners.put(0, 0, new double[] { 0, 0 });
    objectCorners.put(1, 0, new double[] { objectImageMat.cols(), 0 });
    objectCorners.put(2, 0, new double[] { objectImageMat.cols(), objectImageMat.rows() });
    objectCorners.put(3, 0, new double[] { 0, objectImageMat.rows() });

    Core.perspectiveTransform(objectCorners, sceneCorners, hg);

    // points of object
    Point po1 = new Point(objectCorners.get(0, 0));
    Point po2 = new Point(objectCorners.get(1, 0));
    Point po3 = new Point(objectCorners.get(2, 0));
    Point po4 = new Point(objectCorners.get(3, 0));

    // point of object in scene
    Point p1 = new Point(sceneCorners.get(0, 0)); // top left
    Point p2 = new Point(sceneCorners.get(1, 0)); // top right
    Point p3 = new Point(sceneCorners.get(2, 0)); // bottom right
    Point p4 = new Point(sceneCorners.get(3, 0)); // bottom left

    logger.debug(po1);
    logger.debug(po2);
    logger.debug(po3);
    logger.debug(po4);
    logger.debug(p1); // top left
    logger.debug(p2); // top right
    logger.debug(p3); // bottom right
    logger.debug(p4); // bottom left

    if (debug) {
        try {
            // translate corners
            p1.set(new double[] { p1.x + objectImageMat.cols(), p1.y });
            p2.set(new double[] { p2.x + objectImageMat.cols(), p2.y });
            p3.set(new double[] { p3.x + objectImageMat.cols(), p3.y });
            p4.set(new double[] { p4.x + objectImageMat.cols(), p4.y });

            Imgproc.line(imgMatch, p1, p2, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p2, p3, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p3, p4, new Scalar(0, 255, 0), 1);
            Imgproc.line(imgMatch, p4, p1, new Scalar(0, 255, 0), 1);

            showResultingPicture(imgMatch);
        } catch (IOException e) {
            // displaying the debug picture is best-effort; ignore failures
        }
    }

    // check rotation angles
    checkRotationAngle(p1, p2, p3, p4, po1, po2, po3, po4);

    // rework on scene points as new, we are sure the object rotation is 0, 90, 180 or 270
    reworkOnScenePoints(p1, p2, p3, p4);

    // check that aspect ratio of the detected height and width are the same
    checkDetectionZoneAspectRatio(p1, p2, p4, po1, po2, po4);

    recordDetectedRectangle(p1, p2, p3, p4);
}