Example usage for org.opencv.core Mat put

List of usage examples for org.opencv.core Mat put

Introduction

On this page you can find example usage for org.opencv.core Mat put.

Prototype

public int put(int row, int col, byte[] data) 
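
The byte[] overload copies raw element data into an already allocated 8-bit Mat, starting at the given row and column and continuing through the matrix buffer; the return value is the number of bytes written. The following is a minimal sketch of the call, not taken from any of the projects listed below; it assumes the OpenCV Java bindings are on the classpath and that the native library can be loaded.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class MatPutExample {
    public static void main(String[] args) {
        // The native library must be loaded before any Mat is created.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A 2x3 single-channel 8-bit matrix.
        Mat m = new Mat(2, 3, CvType.CV_8UC1);

        // Six bytes, one per element, written starting at element (0, 0).
        byte[] data = { 1, 2, 3, 4, 5, 6 };
        int bytesWritten = m.put(0, 0, data);

        System.out.println("Bytes written: " + bytesWritten);
        System.out.println(m.dump());
    }
}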

Usage

From source file:video.PictureView.java

public static Mat bufferedImageToMat(BufferedImage in) {
    Mat out;
    byte[] data;
    int r, g, b;
    int height = in.getHeight();
    int width = in.getWidth();
    if (in.getType() == BufferedImage.TYPE_INT_RGB || in.getType() == BufferedImage.TYPE_INT_ARGB) {
        // getRGB() returns packed ARGB ints; the bytes are stored in B,G,R order (OpenCV's convention).
        out = new Mat(height, width, CvType.CV_8UC3);
        data = new byte[height * width * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            data[i * 3 + 2] = (byte) ((dataBuff[i] >> 16) & 0xFF);
            data[i * 3 + 1] = (byte) ((dataBuff[i] >> 8) & 0xFF);
            data[i * 3] = (byte) ((dataBuff[i]) & 0xFF);
        }
    } else if (in.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        // getRGB() still returns packed ARGB ints here; note that this branch stores the bytes in
        // R,G,B order, unlike the B,G,R order used in the branch above.
        out = new Mat(height, width, CvType.CV_8UC3);
        data = new byte[height * width * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            data[i * 3 + 2] = (byte) ((dataBuff[i]) & 0xFF);
            data[i * 3 + 1] = (byte) ((dataBuff[i] >> 8) & 0xFF);
            data[i * 3] = (byte) ((dataBuff[i] >> 16) & 0xFF);
        }
    } else {
        // Any other BufferedImage type: fall back to a single-channel grayscale Mat.
        out = new Mat(height, width, CvType.CV_8UC1);
        data = new byte[height * width * (int) out.elemSize()];
        int[] dataBuff = in.getRGB(0, 0, width, height, null, 0, width);
        for (int i = 0; i < dataBuff.length; i++) {
            r = (byte) ((dataBuff[i] >> 16) & 0xFF);
            g = (byte) ((dataBuff[i] >> 8) & 0xFF);
            b = (byte) ((dataBuff[i]) & 0xFF);
            data[i] = (byte) ((0.21 * r) + (0.71 * g) + (0.07 * b)); //luminosity
        }
    }
    // Copy the packed byte buffer into the Mat in a single call, starting at element (0, 0).
    out.put(0, 0, data);
    return out;
}
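
A hypothetical caller for the helper above, shown only as a hedged usage sketch: the class name, the frame.png path, and the explicit library load are assumptions rather than part of the original video.PictureView source.

import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import video.PictureView;

public class BufferedImageToMatDemo {
    public static void main(String[] args) throws Exception {
        // The OpenCV native library must be loaded before the conversion creates a Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // "frame.png" is a placeholder for any image readable by ImageIO.
        BufferedImage image = ImageIO.read(new File("frame.png"));
        Mat mat = PictureView.bufferedImageToMat(image);
        System.out.println("Converted Mat: " + mat.rows() + "x" + mat.cols()
                + ", channels = " + mat.channels());
    }
}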

From source file:View.Signature.java

public static int sift(String routeVal, String route, String n_img1, String n_img2, String extension) {

    String bookObject = routeVal + n_img2 + extension;
    String bookScene = route + n_img1 + extension;

    //System.out.println("Starting SIFT");
    //java.lang.System.out.print("Opening images | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Finding keypoints with SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computing descriptors | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.   
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);

    //java.lang.System.out.print("Drawing keypoints on the base image | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image  
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detecting keypoints in the base image | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computing descriptors in the base image | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.print("Finding matches between the images | ");
    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Computing good matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    float nndrRatio = 0.7f;
    java.lang.System.out.println(matches.size());
    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);

        }
    }

    if (goodMatchesList.size() >= 7) {
        //java.lang.System.out.println("Match found!!! Matches: "+goodMatchesList.size());
        //if(goodMatchesList.size()>max){

        //cambio = 1;
        //}    

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");  
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Drawing the matches image");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
        String n_outputImage = route + "results\\" + n_img2 + "_outputImage_sift" + extension;
        String n_matchoutput = route + "results\\" + n_img2 + "_matchoutput_sift" + extension;
        String n_img = route + "results\\" + n_img2 + "_sift" + extension;
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        //Highgui.imwrite(n_img, img);  
        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height * 100 / matches.size();

        java.lang.System.out.println((int) result);
        //double result =goodMatches.size().height;
        if (result > 100) {
            return 100;
        } else if (result <= 100 && result > 85) {
            return 85;
        } else if (result <= 85 && result > 50) {
            return 50;
        } else if (result <= 50 && result > 25) {
            return 25;
        } else {
            return 0;
        }
    } else {
        //java.lang.System.out.println("Signature not found");
    }
    return 0;
    //System.out.println("Finishing SIFT");
}
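
A hypothetical invocation of the method above; the directories and image names are placeholders, not values from the original project. The method compares the two images with SIFT and returns a coarse similarity score of 0, 25, 50, 85, or 100.

import org.opencv.core.Core;
import View.Signature;

public class SignatureSiftDemo {
    public static void main(String[] args) {
        // The native OpenCV library must be loaded before any Highgui or features2d call.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // routeVal holds the reference signature (n_img2), route holds the scanned one (n_img1).
        // Note: the method writes its visualisation images into the results\ subfolder of route,
        // which must already exist.
        int score = Signature.sift("C:\\signatures\\reference\\", "C:\\signatures\\scanned\\",
                "scan_01", "reference_01", ".jpg");
        System.out.println("Similarity score: " + score);
    }
}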

From source file:View.SignatureLib.java

public static int sift(String routeRNV, String routeAdherent) {

    String bookObject = routeAdherent;
    String bookScene = routeRNV;

    //System.out.println("Starting SIFT");
    //java.lang.System.out.print("Opening images | ");
    Mat objectImage = Highgui.imread(bookObject, Highgui.CV_LOAD_IMAGE_COLOR);
    Mat sceneImage = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

    MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
    FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SIFT);
    //java.lang.System.out.print("Finding keypoints with SIFT | ");
    featureDetector.detect(objectImage, objectKeyPoints);
    KeyPoint[] keypoints = objectKeyPoints.toArray();

    MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
    DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    //java.lang.System.out.print("Computing descriptors | ");
    descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

    // Create the matrix for output image.   
    Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar newKeypointColor = new Scalar(255, 0, 0);

    //java.lang.System.out.print("Drawing keypoints on the base image | ");
    Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

    // Match object image with the scene image  
    MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
    MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
    //java.lang.System.out.print("Detecting keypoints in the base image | ");
    featureDetector.detect(sceneImage, sceneKeyPoints);
    //java.lang.System.out.print("Computing descriptors in the base image | ");
    descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

    Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Highgui.CV_LOAD_IMAGE_COLOR);
    Scalar matchestColor = new Scalar(0, 255, 0);

    List<MatOfDMatch> matches = new LinkedList<MatOfDMatch>();
    DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    //java.lang.System.out.println(sceneDescriptors);  

    if (sceneDescriptors.empty()) {
        java.lang.System.out.println("Object not found");
        return 0;
    }

    descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

    //java.lang.System.out.println("Computing good matches");
    LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

    float nndrRatio = 0.7f;

    for (int i = 0; i < matches.size(); i++) {
        MatOfDMatch matofDMatch = matches.get(i);
        DMatch[] dmatcharray = matofDMatch.toArray();
        DMatch m1 = dmatcharray[0];
        DMatch m2 = dmatcharray[1];

        if (m1.distance <= m2.distance * nndrRatio) {
            goodMatchesList.addLast(m1);

        }
    }

    if (goodMatchesList.size() >= 7) {
        max = goodMatchesList.size();

        List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
        List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

        LinkedList<Point> objectPoints = new LinkedList<>();
        LinkedList<Point> scenePoints = new LinkedList<>();

        for (int i = 0; i < goodMatchesList.size(); i++) {
            objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
            scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
        objMatOfPoint2f.fromList(objectPoints);
        MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
        scnMatOfPoint2f.fromList(scenePoints);

        Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

        Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
        Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

        obj_corners.put(0, 0, new double[] { 0, 0 });
        obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
        obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
        obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

        //System.out.println("Transforming object corners to scene corners...");  
        Core.perspectiveTransform(obj_corners, scene_corners, homography);

        Mat img = Highgui.imread(bookScene, Highgui.CV_LOAD_IMAGE_COLOR);

        Core.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                new Scalar(0, 255, 0), 4);
        Core.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                new Scalar(0, 255, 0), 4);

        //java.lang.System.out.println("Drawing the matches image");
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(goodMatchesList);

        Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);

        String n_outputImage = "../pre/outputImage_sift.jpg";
        String n_matchoutput = "../pre/matchoutput_sift.jpg";
        String n_img = "../pre/sift.jpg";
        Highgui.imwrite(n_outputImage, outputImage);
        Highgui.imwrite(n_matchoutput, matchoutput);
        Highgui.imwrite(n_img, img);
        java.lang.System.out.println(goodMatches.size().height);
        double result = goodMatches.size().height;//*100/matches.size();
        int score = 0;
        if (result > 26) {
            score = 100;
        } else if (result <= 26 && result > 22) {
            score = 85;
        } else if (result <= 22 && result > 17) {
            score = 50;
        } else if (result <= 17 && result > 11) {
            score = 25;
        } else {
            score = 0;
        }
        java.lang.System.out.println("Score: " + score);
        return score;
    } else {
        java.lang.System.out.println("Object not found");
        return 0;
    }
    //System.out.println("Finishing SIFT");
}

From source file:vinylsleevedetection.Analyze.java

public void Check() {
    count = 1;
    //load openCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    //for loop to compare source images to user image
    for (int j = 1; j < 4; j++) {
        //source image location (record sleeve)
        String Object = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Source\\" + j
                + ".jpg";
        //user image location
        String Scene = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\camera.jpg";
        //load images
        Mat objectImage = Imgcodecs.imread(Object, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Mat sceneImage = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        //use BRISK feature detection
        MatOfKeyPoint objectKeyPoints = new MatOfKeyPoint();
        FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.BRISK);
        //perform feature detection on source image
        featureDetector.detect(objectImage, objectKeyPoints);
        KeyPoint[] keypoints = objectKeyPoints.toArray();
        //use descriptor extractor
        MatOfKeyPoint objectDescriptors = new MatOfKeyPoint();
        DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
        descriptorExtractor.compute(objectImage, objectKeyPoints, objectDescriptors);

        Mat outputImage = new Mat(objectImage.rows(), objectImage.cols(), Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar newKeypointColor = new Scalar(255, 0, 0);

        Features2d.drawKeypoints(objectImage, objectKeyPoints, outputImage, newKeypointColor, 0);

        MatOfKeyPoint sceneKeyPoints = new MatOfKeyPoint();
        MatOfKeyPoint sceneDescriptors = new MatOfKeyPoint();
        featureDetector.detect(sceneImage, sceneKeyPoints);
        descriptorExtractor.compute(sceneImage, sceneKeyPoints, sceneDescriptors);

        Mat matchoutput = new Mat(sceneImage.rows() * 2, sceneImage.cols() * 2, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        Scalar matchestColor = new Scalar(0, 255, 0);

        List<MatOfDMatch> matches = new LinkedList<>();
        DescriptorMatcher descriptorMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        descriptorMatcher.knnMatch(objectDescriptors, sceneDescriptors, matches, 2);

        LinkedList<DMatch> goodMatchesList = new LinkedList<DMatch>();

        float nndrRatio = 0.7f;

        for (int i = 0; i < matches.size(); i++) {
            MatOfDMatch matofDMatch = matches.get(i);
            DMatch[] dmatcharray = matofDMatch.toArray();
            DMatch m1 = dmatcharray[0];
            DMatch m2 = dmatcharray[1];

            if (m1.distance <= m2.distance * nndrRatio) {
                goodMatchesList.addLast(m1);

            }
        }
        //if the number of good matches is more than 150, a match is found
        if (goodMatchesList.size() > 150) {
            System.out.println("Object Found");

            List<KeyPoint> objKeypointlist = objectKeyPoints.toList();
            List<KeyPoint> scnKeypointlist = sceneKeyPoints.toList();

            LinkedList<Point> objectPoints = new LinkedList<>();
            LinkedList<Point> scenePoints = new LinkedList<>();

            for (int i = 0; i < goodMatchesList.size(); i++) {
                objectPoints.addLast(objKeypointlist.get(goodMatchesList.get(i).queryIdx).pt);
                scenePoints.addLast(scnKeypointlist.get(goodMatchesList.get(i).trainIdx).pt);
            }

            MatOfPoint2f objMatOfPoint2f = new MatOfPoint2f();
            objMatOfPoint2f.fromList(objectPoints);
            MatOfPoint2f scnMatOfPoint2f = new MatOfPoint2f();
            scnMatOfPoint2f.fromList(scenePoints);

            Mat homography = Calib3d.findHomography(objMatOfPoint2f, scnMatOfPoint2f, Calib3d.RANSAC, 3);

            Mat obj_corners = new Mat(4, 1, CvType.CV_32FC2);
            Mat scene_corners = new Mat(4, 1, CvType.CV_32FC2);

            obj_corners.put(0, 0, new double[] { 0, 0 });
            obj_corners.put(1, 0, new double[] { objectImage.cols(), 0 });
            obj_corners.put(2, 0, new double[] { objectImage.cols(), objectImage.rows() });
            obj_corners.put(3, 0, new double[] { 0, objectImage.rows() });

            Core.perspectiveTransform(obj_corners, scene_corners, homography);

            Mat img = Imgcodecs.imread(Scene, Imgcodecs.CV_LOAD_IMAGE_COLOR);
            //draw a green square around the matched object
            Imgproc.line(img, new Point(scene_corners.get(0, 0)), new Point(scene_corners.get(1, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(1, 0)), new Point(scene_corners.get(2, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(2, 0)), new Point(scene_corners.get(3, 0)),
                    new Scalar(0, 255, 0), 10);
            Imgproc.line(img, new Point(scene_corners.get(3, 0)), new Point(scene_corners.get(0, 0)),
                    new Scalar(0, 255, 0), 10);

            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(goodMatchesList);

            Features2d.drawMatches(objectImage, objectKeyPoints, sceneImage, sceneKeyPoints, goodMatches,
                    matchoutput, matchestColor, newKeypointColor, new MatOfByte(), 2);
            //write out the keypoints image, the match visualisation, and the scene image with the detected sleeve outlined
            String folder = "E:\\Users\\Jamie\\Documents\\NetBeansProjects\\VinylSleeveDetection\\Output\\";
            Imgcodecs.imwrite(folder + "outputImage.jpg", outputImage);
            Imgcodecs.imwrite(folder + "matchoutput.jpg", matchoutput);
            Imgcodecs.imwrite(folder + "found.jpg", img);
            count = j;
            break;
        } else {
            System.out.println("Object Not Found");
            count = 0;
        }

    }

}

From source file:xored.vpn.fixer.TokenDetector.java

private Mat addTo(Mat matA, List<Mat> mats) {
    // Start from a copy of matA, then write each Mat from the list into it at an
    // increasing column offset, padding single-channel values out to three values.
    Mat m = new Mat(matA.rows(), matA.cols(), matA.type());
    for (int i = 0; i < matA.rows(); i++) {
        for (int j = 0; j < matA.cols(); j++) {
            m.put(i, j, matA.get(i, j));
        }
    }
    int xOffset = 0;
    for (Mat mat : mats) {
        for (int i = 0; i < mat.rows(); i++) {
            for (int j = 0; j < mat.cols(); j++) {
                double[] v = mat.get(i, j);
                if (v.length == 1) {
                    double[] v1 = { v[0], v[0], v[0] };
                    m.put(i, j + xOffset, v1);
                } else {
                    m.put(i, j + xOffset, v);
                }
            }
        }
        xOffset += mat.cols() + 10;
    }

    return m;
}