Example usage for org.opencv.core Mat submat

List of usage examples for org.opencv.core Mat submat

Introduction

On this page you can find example usage for org.opencv.core Mat submat.

Prototype

public Mat submat(Rect roi) 
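
A note on semantics before the examples: submat returns a header over the same pixel data as the parent Mat, not a copy. A minimal sketch (assuming the OpenCV native library has already been loaded) showing that writing through the returned sub-matrix changes the parent:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;

public class SubmatDemo {
    public static void main(String[] args) {
        // assumes System.loadLibrary(Core.NATIVE_LIBRARY_NAME) has already run
        Mat parent = new Mat(4, 4, CvType.CV_8UC1, new Scalar(0));

        // submat returns a view over the same data, not a copy
        Mat view = parent.submat(new Rect(1, 1, 2, 2));
        view.setTo(new Scalar(255));

        // the parent now contains a 2x2 block of 255s at (1, 1)
        System.out.println(parent.dump());
    }
}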

Usage

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.ResultUtil.java

License:Open Source License

@NonNull
public static Mat concatenateHorizontal(@NonNull Mat m1, @NonNull Mat m2) {
    int width = m1.cols() + m2.cols() + HORIZONTAL_MARGIN;
    int height = Math.max(m1.rows(), m2.rows());

    Mat result = new Mat(height, width, CvType.CV_8UC3,
            new Scalar(MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE, MAX_RGB_INT_VALUE));

    // rect works with x, y, width, height
    Rect roi1 = new Rect(0, 0, m1.cols(), m1.rows());
    Mat roiMat1 = result.submat(roi1);
    m1.copyTo(roiMat1);

    Rect roi2 = new Rect(m1.cols() + HORIZONTAL_MARGIN, 0, m2.cols(), m2.rows());
    Mat roiMat2 = result.submat(roi2);
    m2.copyTo(roiMat2);

    return result;
}
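
A hedged usage sketch for the method above (the file names are illustrative; both inputs should be CV_8UC3 so the copyTo into the CV_8UC3 result succeeds):

Mat left = Imgcodecs.imread("left.png");   // imread loads as CV_8UC3 by default
Mat right = Imgcodecs.imread("right.png");
Mat strip = ResultUtil.concatenateHorizontal(left, right);
Imgcodecs.imwrite("strip.png", strip);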

From source file:samples.LWF.java

private static void affine(Mat mat, double[][] from, double[][] to, double[][] coefficients, Mat lienzo,
        double escala, double gap) {

    //   http://stackoverflow.com/questions/10100715/opencv-warping-from-one-triangle-to-another
    //  https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/
    //   http://docs.opencv.org/2.4/doc/tutorials/imgproc/imgtrans/warp_affine/warp_affine.html
    MatOfPoint2f src_pf = new MatOfPoint2f(new Point(from[0][0], from[0][1]), new Point(from[1][0], from[1][1]),
            new Point(from[2][0], from[2][1]));
    MatOfPoint2f dst_pf = new MatOfPoint2f(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]),
            new Point(to[2][0], to[2][1]));

    //  https://www.learnopencv.com/warp-one-triangle-to-another-using-opencv-c-python/#download
    //  Mat perspective_matrix = Imgproc.getAffineTransform(src_pf, dst_pf);
    Rect r1 = Imgproc.boundingRect(new MatOfPoint(new Point(from[0][0], from[0][1]),
            new Point(from[1][0], from[1][1]), new Point(from[2][0], from[2][1])));
    Rect r2 = Imgproc.boundingRect(new MatOfPoint(new Point(to[0][0], to[0][1]), new Point(to[1][0], to[1][1]),
            new Point(to[2][0], to[2][1])));

    MatOfPoint2f tri1Cropped = new MatOfPoint2f(new Point(from[0][0] - r1.x, from[0][1] - r1.y),
            new Point(from[1][0] - r1.x, from[1][1] - r1.y), new Point(from[2][0] - r1.x, from[2][1] - r1.y));

    MatOfPoint tri2CroppedInt = new MatOfPoint(new Point(to[0][0] - r2.x, to[0][1] - r2.y),
            new Point(to[1][0] - r2.x, to[1][1] - r2.y), new Point(to[2][0] - r2.x, to[2][1] - r2.y));

    MatOfPoint2f tri2Cropped = new MatOfPoint2f(new Point((to[0][0] - r2.x), (to[0][1] - r2.y)),
            new Point((to[1][0] - r2.x), (to[1][1] - r2.y)), new Point((to[2][0] - r2.x), (to[2][1] - r2.y)));

    // Apply warpImage to small rectangular patches
    Mat img1Cropped = mat.submat(r1);
    //img1(r1).copyTo(img1Cropped);

    // Given a pair of triangles, find the affine transform.
    Mat warpMat = Imgproc.getAffineTransform(tri1Cropped, tri2Cropped);

    // Apply the Affine Transform just found to the src image
    Mat img2Cropped = Mat.zeros(r2.height, r2.width, img1Cropped.type());
    Imgproc.warpAffine(img1Cropped, img2Cropped, warpMat, img2Cropped.size(), Imgproc.INTER_LINEAR,
            Core.BORDER_REFLECT_101, new Scalar(0)); // args: interpolation flags, border mode, border value (matches the C++ reference below)

    // Get mask by filling triangle
    Mat mask = Mat.zeros(r2.height, r2.width, CvType.CV_8UC3); // the C++ reference below uses CV_32FC3
    Imgproc.fillConvexPoly(mask, tri2CroppedInt, new Scalar(1.0, 1.0, 1.0), 16, 0);

    // Copy triangular region of the rectangular patch to the output image
    Core.multiply(img2Cropped, mask, img2Cropped);
    //Core.multiply(lienzo.submat(r2), mask  , lienzo.submat(r2));         
    Core.add(lienzo.submat(r2), img2Cropped, lienzo.submat(r2));

    /*     
     // Find bounding rectangle for each triangle
     Rect r1 = boundingRect(tri1);
     Rect r2 = boundingRect(tri2);
            
     // Offset points by left top corner of the respective rectangles
     vector<Point2f> tri1Cropped, tri2Cropped;
     vector<Point> tri2CroppedInt;
     for(int i = 0; i < 3; i++)
     {
     tri1Cropped.push_back( Point2f( tri1[i].x - r1.x, tri1[i].y -  r1.y) );
     tri2Cropped.push_back( Point2f( tri2[i].x - r2.x, tri2[i].y - r2.y) );
            
     // fillConvexPoly needs a vector of Point and not Point2f
     tri2CroppedInt.push_back( Point((int)(tri2[i].x - r2.x), (int)(tri2[i].y - r2.y)) );
            
     }
            
     // Apply warpImage to small rectangular patches
     Mat img1Cropped;
     img1(r1).copyTo(img1Cropped);
            
     // Given a pair of triangles, find the affine transform.
     Mat warpMat = getAffineTransform( tri1Cropped, tri2Cropped );
            
     // Apply the Affine Transform just found to the src image
     Mat img2Cropped = Mat::zeros(r2.height, r2.width, img1Cropped.type());
     warpAffine( img1Cropped, img2Cropped, warpMat, img2Cropped.size(), INTER_LINEAR, BORDER_REFLECT_101);
            
     // Get mask by filling triangle
     Mat mask = Mat::zeros(r2.height, r2.width, CV_32FC3);
     fillConvexPoly(mask, tri2CroppedInt, Scalar(1.0, 1.0, 1.0), 16, 0);
            
     // Copy triangular region of the rectangular patch to the output image
     multiply(img2Cropped,mask, img2Cropped);
     multiply(img2(r2), Scalar(1.0,1.0,1.0) - mask, img2(r2));
     img2(r2) = img2(r2) + img2Cropped;*/
}
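
The active code above adds the masked warp into the canvas but leaves the canvas-clearing multiply commented out, so overlapping triangles can accumulate. A hedged Java sketch of the full blend from the C++ reference quoted in the method (the inverted mask is the piece the commented-out line was missing):

// Composite the warped triangle patch into the canvas ROI through the mask.
// mask holds 1s inside the triangle and 0s elsewhere (CV_8UC3 here).
static void blendTriangle(Mat lienzo, Rect r2, Mat img2Cropped, Mat mask) {
    Mat dstRoi = lienzo.submat(r2);                // view into the output canvas

    // invert the 0/1 mask: 1 outside the triangle, 0 inside
    Mat inv = new Mat();
    Core.subtract(new Mat(mask.size(), mask.type(), new Scalar(1, 1, 1)), mask, inv);

    Core.multiply(img2Cropped, mask, img2Cropped); // keep only triangle pixels of the warp
    Core.multiply(dstRoi, inv, dstRoi);            // clear the triangle area in the canvas
    Core.add(dstRoi, img2Cropped, dstRoi);         // composite; writes through the view
}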

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

public void init(Mat frame1, Rect trackedBox) {
    // get Bounding boxes
    if (Math.min(trackedBox.width, trackedBox.height) < _params.min_win) {
        throw new IllegalArgumentException(
                "Provided trackedBox: " + trackedBox + " is too small (min " + _params.min_win + ")");
    }
    _grid = new Grid(frame1, trackedBox, _params.min_win);
    System.out.println("Init Created " + _grid.getSize() + " bounding boxes.");
    _grid.updateGoodBadBoxes(trackedBox, _params.num_closest_init);

    _iiRows = frame1.rows();
    _iiCols = frame1.cols();
    _iisum.create(_iiRows, _iiCols, CvType.CV_32F);
    _iisqsum.create(_iiRows, _iiCols, CvType.CV_64F);

    // correct bounding box
    _lastbox = _grid.getBestBox();

    _classifierFern.init(_grid.getTrackedBoxScales(), _rng);

    // generate DATA
    // generate POSITIVE DATA
    generatePositiveData(frame1, _params.num_warps_init, _grid);

    // Set variance threshold
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame1.submat(_grid.getBestBox()), new MatOfDouble(), stddev);
    updateIntegralImgs(frame1);
    // half the variance of the initial box; it will be used in the 1st stage of the classifier
    _var = (float) Math.pow(stddev.toArray()[0], 2d) * 0.5f;
    // check variance
    final double checkVar = TLDUtil.getVar(_grid.getBestBox(), _iisumJava, _iisqsumJava, _iiCols) * 0.5;
    System.out.println("Variance: " + _var + " / Check variance: " + checkVar);

    // generate NEGATIVE DATA
    final Pair<List<Pair<int[], Boolean>>, List<Mat>> negData = generateNegativeData(frame1);

    // Split Negative Ferns <features, labels=false> into Training and Testing sets (they are already shuffled)
    final int nFernsSize = negData.first.size();
    final List<Pair<int[], Boolean>> nFernsTest = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(0, nFernsSize / 2));
    final List<Pair<int[], Boolean>> nFerns = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(nFernsSize / 2, nFernsSize));

    // Split Negative NN Examples into Training and Testing sets
    final int nExSize = negData.second.size();
    final List<Mat> nExamplesTest = new ArrayList<Mat>(negData.second.subList(0, nExSize / 2));
    _nExamples = new ArrayList<Mat>(negData.second.subList(nExSize / 2, nExSize));

    //MERGE Negative Data with Positive Data and shuffle it
    final List<Pair<int[], Boolean>> fernsData = new ArrayList<Pair<int[], Boolean>>(_pFerns);
    fernsData.addAll(nFerns);
    Collections.shuffle(fernsData);

    // TRAINING
    System.out.println("Init Start Training with " + fernsData.size() + " ferns, " + _nExamples.size()
            + " nExamples, " + nFernsTest.size() + " nFernsTest, " + nExamplesTest.size() + " nExamplesTest");
    _classifierFern.trainF(fernsData, 10);
    _classifierNN.trainNN(_pExample, _nExamples);
    // Threshold evaluation on testing sets
    _classifierFern.evaluateThreshold(nFernsTest);
    _classifierNN.evaluateThreshold(nExamplesTest);
}
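
The variance threshold above is half the pixel variance of the best box, computed with Core.meanStdDev over a submat. A standalone hedged sketch of that step (assuming a single-channel frame):

// Half the pixel variance of an ROI, as used by the stage-1 variance filter.
static double halfRoiVariance(Mat frame, Rect roi) {
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame.submat(roi), mean, stddev);
    double sd = stddev.toArray()[0];
    return 0.5 * sd * sd;
}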

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

private TrackingStruct track(final Mat lastImg, final Mat currentImg, final BoundingBox lastBox) {
    System.out.println("[TRACK]");

    // Generate points
    final Point[] lastPoints = lastBox.points();
    if (lastPoints.length == 0) {
        System.out.println("Points not generated from lastBox: " + lastBox);
        return null;
    }

    // Frame-to-frame tracking with forward-backward error checking
    final Pair<Point[], Point[]> trackedPoints = _tracker.track(lastImg, currentImg, lastPoints);
    if (trackedPoints == null) {
        System.out.println("No points could be tracked.");
        return null;
    }
    if (_tracker.getMedianErrFB() > _params.tracker_stability_FBerrMax) {
        System.out.println("TRACKER too unstable. FB Median error: " + _tracker.getMedianErrFB() + " > "
                + _params.tracker_stability_FBerrMax);
        // return null;  // we hope the detection will find the pattern again
    }

    // bounding box prediction
    final BoundingBox predictedBB = lastBox.predict(trackedPoints.first, trackedPoints.second);
    if (predictedBB.x > currentImg.cols() || predictedBB.y > currentImg.rows() || predictedBB.br().x < 1
            || predictedBB.br().y < 1) {
        System.out.println("TRACKER Predicted bounding box out of range !");
        return null;
    }

    // estimate Confidence
    Mat pattern = new Mat();
    try {
        resizeZeroMeanStdev(currentImg.submat(predictedBB.intersect(currentImg)), pattern, _params.patch_size);
    } catch (Throwable t) {
        System.out.println("PredBB when failed: " + predictedBB);
    }
    //System.out.println("Confidence " + pattern.dump());      

    //Conservative Similarity
    final NNConfStruct nnConf = _classifierNN.nnConf(pattern);
    System.out.println("Tracking confidence: " + nnConf.conservativeSimilarity);

    System.out.println("[TRACK END]");
    return new TrackingStruct(nnConf.conservativeSimilarity, predictedBB, trackedPoints.first,
            trackedPoints.second);
}
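
submat throws when the ROI extends outside the image, which is why the code intersects predictedBB with the frame first. BoundingBox.intersect is project code; a hypothetical equivalent for a plain Rect might look like this:

// Clamp roi to the bounds of img so that img.submat(clamped) cannot throw.
// Returns an empty Rect when there is no overlap.
static Rect clampToImage(Rect roi, Mat img) {
    int x = Math.max(roi.x, 0);
    int y = Math.max(roi.y, 0);
    int w = Math.min(roi.x + roi.width, img.cols()) - x;
    int h = Math.min(roi.y + roi.height, img.rows()) - y;
    return (w <= 0 || h <= 0) ? new Rect(0, 0, 0, 0) : new Rect(x, y, w, h);
}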

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

/**
 * Structure the classifier into 3 stages:
 * a) patch variance
 * b) ensemble of ferns classifier
 * c) nearest neighbour
 */
private Pair<List<DetectionStruct>, List<DetectionStruct>> detect(final Mat frame) {
    System.out.println("[DETECT]");

    final List<DetectionStruct> fernClassDetected = new ArrayList<Tld.DetectionStruct>(); //dt
    final List<DetectionStruct> nnMatches = new ArrayList<Tld.DetectionStruct>(); //dbb

    // 0. Cleaning
    _boxClusterMap.clear();

    // 1. DETECTION
    final Mat img = new Mat(frame.rows(), frame.cols(), CvType.CV_8U);
    updateIntegralImgs(frame);
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);

    // Apply the Variance filter TODO : Bottleneck
    int a = 0;
    for (BoundingBox box : _grid) {
        // a) speed up by doing the features/ferns check ONLY if the variance is high enough !
        if (TLDUtil.getVar(box, _iisumJava, _iisqsumJava, _iiCols) >= _var) {
            a++;
            final Mat patch = img.submat(box);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, box.scaleIdx);
            final double averagePosterior = _classifierFern.averagePosterior(allFernsHashCodes);
            _fernDetectionNegDataForLearning.put(box, allFernsHashCodes);// store for later use in learning

            // b)
            if (averagePosterior > _classifierFern.getFernPosThreshold()) {
                fernClassDetected.add(new DetectionStruct(box, allFernsHashCodes, averagePosterior, patch));
            }
        }
    }

    System.out.println(a + " Bounding boxes passed the variance filter (" + _var + ")");
    System.out.println(fernClassDetected.size() + " Initial detected from Fern Classifier");
    if (fernClassDetected.size() == 0) {
        System.out.println("[DETECT END]");
        return null;
    }

    // keep only the best
    TLDUtil.keepBestN(fernClassDetected, MAX_DETECTED, new Comparator<DetectionStruct>() {
        @Override
        public int compare(DetectionStruct detS1, DetectionStruct detS2) {
            return Double.compare(detS1.averagePosterior, detS2.averagePosterior);
        }
    });

    // 2. MATCHING using the NN classifier  c)
    for (DetectionStruct detStruct : fernClassDetected) {
        // update detStruct.patch to params.patch_size and normalise it
        Mat pattern = new Mat();
        resizeZeroMeanStdev(detStruct.patch, pattern, _params.patch_size);
        detStruct.nnConf = _classifierNN.nnConf(pattern);

        System.out.println("NNConf: " + detStruct.nnConf.relativeSimilarity + " / "
                + detStruct.nnConf.conservativeSimilarity + " Threshold: " + _classifierNN.getNNThreshold());
        // only keep valid boxes
        if (detStruct.nnConf.relativeSimilarity > _classifierNN.getNNThreshold()) {
            nnMatches.add(detStruct);
        }
    }

    System.out.println("[DETECT END]");
    return new Pair<List<DetectionStruct>, List<DetectionStruct>>(fernClassDetected, nnMatches);
}
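
TLDUtil.keepBestN is project code; a hypothetical stand-in that keeps the N highest-ranked detections, assuming the comparator orders ascending as at the call site above:

// Hypothetical sketch; the real TLDUtil.keepBestN may differ.
static <T> void keepBestN(List<T> list, int n, Comparator<T> cmp) {
    if (list.size() <= n) {
        return;
    }
    list.sort(cmp.reversed());            // highest-ranked first
    list.subList(n, list.size()).clear(); // drop the rest in place
}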

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

private boolean learn(final Mat img, final List<DetectionStruct> fernClassDetected) {
    System.out.println("[LEARN]");
    Mat pattern = new Mat();
    final double stdev = resizeZeroMeanStdev(img.submat(_lastbox.intersect(img)), pattern, _params.patch_size);
    final NNConfStruct confStruct = _classifierNN.nnConf(pattern);

    if (confStruct.relativeSimilarity < 0.5) {
        System.out.println("Fast change, NOT learning");
        return false;
    }
    if (Math.pow(stdev, 2) < _var) {
        System.out.println("Low variance, NOT learning");
        return false;
    }
    if (confStruct.isin.inNegSet) {
        System.out.println("Patch in negative data, NOT learning");
        return false;
    }

    // Data generation
    _grid.updateGoodBadBoxes(_lastbox, _params.num_closest_update);
    if (_grid.getGoodBoxes().length > 0) {
        generatePositiveData(img, _params.num_warps_update, _grid);
    } else {
        System.out.println("NO good boxes, NOT learning.");
        return false;
    }

    // TODO why don't we learn from the GOOD boxes too !?
    final List<Pair<int[], Boolean>> fernExamples = new ArrayList<TLDUtil.Pair<int[], Boolean>>(_pFerns);
    for (BoundingBox badBox : _grid.getBadBoxes()) {
        final int[] allFernsHashCodes = _fernDetectionNegDataForLearning.get(badBox);
        if (allFernsHashCodes != null) {
            // these are NEGATIVE examples !
            fernExamples.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    final List<Mat> nnExamples = new ArrayList<Mat>();
    if (fernClassDetected != null) {
        for (DetectionStruct detStruct : fernClassDetected) {
            if (_lastbox.calcOverlap(detStruct.detectedBB) < Grid.BAD_OVERLAP) {
                nnExamples.add(detStruct.patch);
            }
        }
    }

    // Classifiers update
    _classifierFern.trainF(fernExamples, 2);
    _classifierNN.trainNN(_pExample, _nExamples);

    System.out.println("[LEARN END]");
    return true;
}

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

/** Inputs:
 * - Image
 * - bad_boxes (Boxes far from the bounding box)
 * - variance (pEx variance)
 * Outputs
 * - Negative fern features (nFerns)
 * - Negative NN examples (nExample)
 */
private Pair<List<Pair<int[], Boolean>>, List<Mat>> generateNegativeData(final Mat frame) {
    final List<Pair<int[], Boolean>> negFerns = new ArrayList<Pair<int[], Boolean>>();
    final List<Mat> negExamples = new ArrayList<Mat>();

    final List<BoundingBox> badBoxes = Arrays.asList(_grid.getBadBoxes());
    Collections.shuffle(badBoxes);
    System.out.println("ST");
    // Get Fern Features of the boxes with big variance (calculated using integral images)
    for (BoundingBox badBox : badBoxes) {
        if (TLDUtil.getVar(badBox, _iisumJava, _iisqsumJava, _iiCols) >= _var * 0.5f) {
            final Mat patch = frame.submat(badBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, badBox.scaleIdx);
            negFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    // select a hard coded number of negative examples
    Iterator<BoundingBox> bbIt = badBoxes.iterator();
    for (int i = 0; i < _params.num_bad_patches && bbIt.hasNext(); i++) {
        final Mat pattern = new Mat();
        final Mat patch = frame.submat(bbIt.next());
        resizeZeroMeanStdev(patch, pattern, _params.patch_size);
        negExamples.add(pattern);
    }

    System.out.println("Negative examples generated. Ferns count: " + negFerns.size() + ". negEx count: "
            + negExamples.size());

    return new Pair<List<Pair<int[], Boolean>>, List<Mat>>(negFerns, negExamples);
}
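
resizeZeroMeanStdev, used here and throughout this class, is also project code; a hypothetical equivalent resizes the patch to a square pattern, zero-centres it, and returns its standard deviation:

// Hypothetical sketch of the project's resizeZeroMeanStdev helper.
static double resizeZeroMeanStdev(Mat patch, Mat pattern, int patchSize) {
    Imgproc.resize(patch, pattern, new Size(patchSize, patchSize));
    pattern.convertTo(pattern, CvType.CV_32F);
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(pattern, mean, stddev);
    Core.subtract(pattern, new Scalar(mean.toArray()[0]), pattern); // zero mean
    return stddev.toArray()[0];
}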

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

/**
 * Generate Positive data
 * Inputs: 
 * - good_boxes 
 * - best_box 
 * - bbhull
 * Outputs: 
 * - Positive fern features (pFerns) 
 * - Positive NN examples (pExample)
 */
public void generatePositiveData(final Mat frame, final int numWarps, final Grid aGrid) {
    resizeZeroMeanStdev(frame.submat(aGrid.getBestBox()), _pExample, _params.patch_size);
    //Get Fern features on warped patches
    final Mat img = new Mat();
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);
    final BoundingBox bbhull = aGrid.getBBhull();
    final Mat warped = img.submat(bbhull);
    // centre of the hull
    final Point pt = new Point(bbhull.x + (bbhull.width - 1) * 0.5f, bbhull.y + (bbhull.height - 1) * 0.5f);

    _pFerns.clear();
    _pPatterns.clear();

    for (int i = 0; i < numWarps; i++) {
        if (i > 0) {
            // This is important: it introduces the necessary noise/fuzziness into the initial examples
            // so that the Fern classifier recognises similar shapes, not only exact ones.
            // warped is a reference to a subset of the img data, so this will affect the img object
            _patchGenerator.generate(frame, pt, warped, bbhull.size(), _rng);
        }

        final BoundingBox[] goodBoxes = aGrid.getGoodBoxes();
        for (BoundingBox goodBox : goodBoxes) {
            final Mat patch = img.submat(goodBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, goodBox.scaleIdx);
            _pFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, true));

            //            // this will be used for display only
            //            final Mat tempPattern = new Mat();
            //            Imgproc.resize(patch, tempPattern, new Size(_params.patch_size, _params.patch_size));
            //            _pPatterns.add(tempPattern);
        }
    }

    System.out.println("Positive examples generated( ferns: " + _pFerns.size() + " NN: 1/n )");
}

From source file:templatematching.ProcessFrame.java

public Image processFrame(VideoCapture capture) {

    while (k < 2) {
        capture.grab();
        // retrieve the grabbed frame into the Mat object; each frame is treated individually
        capture.retrieve(frame);
        int frame_width = frame.cols();
        int frame_height = frame.rows();
        MatOfRect faces1 = new MatOfRect();
        Mat frame_gray = new Mat();
        Mat ImageROI;
        //change the frame to gray-scale
        Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);//gray scale conversion
        //optional histogram equalization
        //Imgproc.equalizeHist(frame_gray, frame_gray);
        //use the face classifier
        faceHaar.detectMultiScale(frame_gray, faces1, 1.1, 2, 2, new Size(30, 30), new Size());
        Rect[] faces = faces1.toArray();

        for (int i = 0; i < faces.length; i++) {
            //  System.out.println("Processing faces");
            Point center = new Point(faces[i].x + faces[i].width * 0.5, faces[i].y + faces[i].height * 0.5);
            Imgproc.ellipse(frame, center, new Size(faces[i].width * 0.5, faces[i].height * 0.5), 0, 0, 360,
                    new Scalar(0, 0, 255), 4, 8, 0);
            Mat faceROI = frame_gray.submat(faces[i]);

            MatOfRect eyes1 = new MatOfRect();
            eyesHaar.detectMultiScale(faceROI, eyes1, 1.15, 2, 2, new Size(30, 30), new Size());
            //eyesHaar.detectMultiScale(faceROI, eyes1, 1.1, 2,  Objdetect.CASCADE_FIND_BIGGEST_OBJECT|Objdetect.CASCADE_SCALE_IMAGE, new Size(30,30),new Size());
            Rect[] eyes = eyes1.toArray();
            //  System.out.println("Processing eyes");
            for (int j = 0; j < eyes.length; j++) {

                Mat eyeROI = faceROI.submat(eyes[j]); // eye rects are relative to faceROI
                Point center1 = new Point(faces[i].x + eyes[j].x + eyes[j].width * 0.5,
                        faces[i].y + eyes[j].y + eyes[j].height * 0.5);
                int radius = (int) ((eyes[j].width + eyes[j].height) * 0.005);
                Imgproc.circle(frame, center1, radius, new Scalar(255, 0, 0), 4, 8, 0);
                Pupilx = (int) center1.x;
                Pupily = (int) center1.y;
                ROIwidth = eyes[j].width;
                ROIheight = eyes[j].height;
                Point centerX[] = new Point[2];
                centerX[k] = center1;
                // NOTE: centerX is recreated on every detection, so centerX[k - 1] below is
                // null on the second pass; it should be a field that persists across frames.
                //scale the coordinates to fit the screen dimensions
                if (k == 0) {
                    scaledPupilx = Pupilx;
                    scaledPupily = Pupily;
                    k++;
                } else {
                    System.out.println("In else part");
                    deltax = (int) Math.abs((centerX[k].x - centerX[k - 1].x));
                    deltay = (int) Math.abs((centerX[k].y - centerX[k - 1].y));

                    scaled_deltax = (deltax * (65535 / ROIwidth));

                    scaled_deltay = (deltay * (65535 / ROIheight));
                    scaledPupilx = centerX[k - 1].x + scaled_deltax;

                    scaledPupily = centerX[k - 1].y + scaled_deltay;
                }
                if (k == 2)
                    k = 0;
                //set the cursor position to the scaled coordinates
                try {
                    Robot robot = new Robot();

                    robot.mouseMove((int) (1366 - scaledPupilx), (int) (768 - scaledPupily));
                } catch (AWTException ex) {

                }
            }
        }

        MatOfByte mem = new MatOfByte();
        Imgcodecs.imencode(".bmp", frame, mem);
        Image im = null;
        try {
            im = ImageIO.read(new ByteArrayInputStream(mem.toArray()));

        } catch (IOException ex) {
            ex.printStackTrace();
        }
        return im;
    }
    return null;

}
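
One subtlety worth a sketch: rectangles returned by detectMultiScale on faceROI are relative to that sub-matrix, so taking a submat of the full frame requires shifting them back into frame coordinates (this is what the center1 arithmetic above does for the circle):

// eyeRect came from detecting inside frame_gray.submat(faceRect),
// so it is relative to faceRect; shift it into full-frame coordinates.
static Rect toFrameCoords(Rect eyeRect, Rect faceRect) {
    return new Rect(faceRect.x + eyeRect.x, faceRect.y + eyeRect.y,
            eyeRect.width, eyeRect.height);
}

// Usage: Mat eyeInFrame = frame_gray.submat(toFrameCoords(eyes[j], faces[i]));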

From source file:uk.ac.horizon.artcodes.process.BlurDetectionFilter.java

License:Open Source License

@Override
public void process(ImageBuffers buffers) {
    Mat greyImage = buffers.getImageInGrey();

    Mat dst = new Mat();

    long start = System.currentTimeMillis();

    int roiSize = Math.min(greyImage.rows(), greyImage.cols()) / 2;
    Imgproc.Laplacian(greyImage.submat(
            new Rect((greyImage.cols() - roiSize) / 2, (greyImage.rows() - roiSize) / 2, roiSize, roiSize)),
            dst, CvType.CV_16S);
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stdDev = new MatOfDouble();
    Core.meanStdDev(dst, mean, stdDev);

    long end = System.currentTimeMillis();

    //Log.i("STDDEV", "StdDev: "+Math.pow(stdDev.get(0,0)[0],2)+ " (took: " + (end-start) + "ms)");

    double blurScore = Math.pow(stdDev.get(0, 0)[0], 2);

    /*
    Mat overlay = buffers.getOverlay();
    String text = "b.score: " + (int)blurScore + " ("+(end-start)+"ms)";
    int y = overlay.rows()-50;
    int x = 50;
    Imgproc.putText(overlay, text, new Point(x,y), Core.FONT_HERSHEY_SIMPLEX, 1, new Scalar(0,0,0,0), 5);
    Imgproc.putText(overlay, text, new Point(x,y), Core.FONT_HERSHEY_SIMPLEX, 1, new Scalar(255,255,255,255), 3);
    */

    // if image is blurry
    if (blurScore <= 100) {
        // tell camera to focus
        Log.i("FOCUS", "Blur detector requesting auto focus with b.score of " + (int) blurScore);
        this.cameraFocusControl.focus(new Runnable() {
            @Override
            public void run() {

            }
        });
    }

}
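
The blur score here is the variance of the Laplacian over a centred square ROI, a common sharpness measure. A standalone hedged sketch of the same computation (the threshold of 100 used above is camera and scene dependent):

// Variance of the Laplacian over the centre square of a grey image;
// lower scores indicate a blurrier image.
static double blurScore(Mat grey) {
    int roiSize = Math.min(grey.rows(), grey.cols()) / 2;
    Rect centre = new Rect((grey.cols() - roiSize) / 2,
            (grey.rows() - roiSize) / 2, roiSize, roiSize);
    Mat lap = new Mat();
    Imgproc.Laplacian(grey.submat(centre), lap, CvType.CV_16S);
    MatOfDouble mean = new MatOfDouble();
    MatOfDouble stdDev = new MatOfDouble();
    Core.meanStdDev(lap, mean, stdDev);
    double sd = stdDev.toArray()[0];
    return sd * sd;
}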