Example usage for org.opencv.core Mat submat

List of usage examples for org.opencv.core Mat submat

Introduction

On this page you can find example usages of org.opencv.core Mat.submat.

Prototype

public Mat submat(Rect roi) 

Source Link

Usage

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Structure the classifier into 3 stages:
 * a) patch variance
 * b) ensemble of ferns classifier
 * c) nearest neighbour
 *
 * @param frame the current input frame
 * @return a Pair of (boxes accepted by the fern classifier, the subset also confirmed by
 *         the nearest-neighbour classifier), or null when the fern stage detects nothing
 */
private Pair<List<DetectionStruct>, List<DetectionStruct>> detect(final Mat frame) {
    Log.i(Util.TAG, "[DETECT]");

    final List<DetectionStruct> fernClassDetected = new ArrayList<Tld.DetectionStruct>(); //dt
    final List<DetectionStruct> nnMatches = new ArrayList<Tld.DetectionStruct>(); //dbb

    // 0. Cleaning
    _boxClusterMap.clear();

    // 1. DETECTION
    final Mat img = new Mat(frame.rows(), frame.cols(), CvType.CV_8U);
    updateIntegralImgs(frame);
    // Blurred copy feeds the fern features; the variance gate below uses the integral images instead
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);

    // Apply the Variance filter TODO : Bottleneck
    int a = 0; // counts boxes that passed the variance gate (used for logging only)
    for (BoundingBox box : _grid) {
        // a) speed up by doing the features/ferns check ONLY if the variance is high enough !
        if (Util.getVar(box, _iisumJava, _iisqsumJava, _iiCols) >= _var) {
            a++;
            final Mat patch = img.submat(box);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, box.scaleIdx);
            final double averagePosterior = _classifierFern.averagePosterior(allFernsHashCodes);
            _fernDetectionNegDataForLearning.put(box, allFernsHashCodes);// store for later use in learning

            // b) keep the box only when the ferns ensemble is confident enough
            if (averagePosterior > _classifierFern.getFernPosThreshold()) {
                fernClassDetected.add(new DetectionStruct(box, allFernsHashCodes, averagePosterior, patch));
            }
        }
    }

    Log.i(Util.TAG, a + " Bounding boxes passed the variance filter (" + _var + ")");
    Log.i(Util.TAG, fernClassDetected.size() + " Initial detected from Fern Classifier");
    if (fernClassDetected.size() == 0) {
        Log.i(Util.TAG, "[DETECT END]");
        return null;
    }

    // keep only the best MAX_DETECTED, ranked by average posterior
    // (ascending comparator — presumably keepBestN retains the N largest; TODO confirm)
    Util.keepBestN(fernClassDetected, MAX_DETECTED, new Comparator<DetectionStruct>() {
        @Override
        public int compare(DetectionStruct detS1, DetectionStruct detS2) {
            return Double.compare(detS1.averagePosterior, detS2.averagePosterior);
        }
    });

    // 2. MATCHING using the NN classifier  c)
    for (DetectionStruct detStruct : fernClassDetected) {
        // update detStruct.patch to params.patch_size and normalise it
        Mat pattern = new Mat();
        resizeZeroMeanStdev(detStruct.patch, pattern, _params.patch_size);
        detStruct.nnConf = _classifierNN.nnConf(pattern);

        Log.i(Util.TAG, "NNConf: " + detStruct.nnConf.relativeSimilarity + " / "
                + detStruct.nnConf.conservativeSimilarity + " Threshold: " + _classifierNN.getNNThreshold());
        // only keep valid boxes
        if (detStruct.nnConf.relativeSimilarity > _classifierNN.getNNThreshold()) {
            nnMatches.add(detStruct);
        }
    }

    Log.i(Util.TAG, "[DETECT END]");
    return new Pair<List<DetectionStruct>, List<DetectionStruct>>(fernClassDetected, nnMatches);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * P-N learning step: re-trains the fern and NN classifiers from the current frame,
 * using the last tracked box (_lastbox) as the positive anchor.
 *
 * @param img               current frame
 * @param fernClassDetected detections from the last detect() pass (may be null)
 * @return true when the classifiers were updated, false when learning was skipped
 */
private boolean learn(final Mat img, final List<DetectionStruct> fernClassDetected) {
    Log.i(Util.TAG, "[LEARN]");
    // Normalised patch of the last tracked box, clipped to the image bounds
    Mat pattern = new Mat();
    final double stdev = resizeZeroMeanStdev(img.submat(_lastbox.intersect(img)), pattern, _params.patch_size);
    final NNConfStruct confStruct = _classifierNN.nnConf(pattern);

    // Sanity gates: skip learning on unreliable frames
    if (confStruct.relativeSimilarity < 0.5) {
        Log.w(Util.TAG, "Fast change, NOT learning");
        return false;
    }
    if (Math.pow(stdev, 2) < _var) {
        Log.w(Util.TAG, "Low variance, NOT learning");
        return false;
    }
    if (confStruct.isin.inNegSet) {
        Log.w(Util.TAG, "Patch in negative data, NOT learning");
        return false;
    }

    // Data generation
    _grid.updateGoodBadBoxes(_lastbox, _params.num_closest_update);
    if (_grid.getGoodBoxes().length > 0) {
        generatePositiveData(img, _params.num_warps_update, _grid);
    } else {
        Log.w(Util.TAG, "NO good boxes, NOT learning.");
        return false;
    }

    // TODO why don't we learn from the GOOD boxes too !?
    // Fern training set: positives from generatePositiveData (_pFerns) plus
    // negatives cached during detect() for the bad boxes
    final List<Pair<int[], Boolean>> fernExamples = new ArrayList<Util.Pair<int[], Boolean>>(_pFerns);
    for (BoundingBox badBox : _grid.getBadBoxes()) {
        final int[] allFernsHashCodes = _fernDetectionNegDataForLearning.get(badBox);
        if (allFernsHashCodes != null) {
            // these are NEGATIVE examples !
            fernExamples.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    // Detections that barely overlap the tracked box — candidate negative NN examples
    final List<Mat> nnExamples = new ArrayList<Mat>();
    if (fernClassDetected != null) {
        for (DetectionStruct detStruct : fernClassDetected) {
            if (_lastbox.calcOverlap(detStruct.detectedBB) < Grid.BAD_OVERLAP) {
                nnExamples.add(detStruct.patch);
            }
        }
    }

    // Classifiers update
    _classifierFern.trainF(fernExamples, 2);
    // NOTE(review): nnExamples is built above but never used — trainNN is fed the
    // _nExamples field instead. Verify whether nnExamples was meant to be passed here.
    _classifierNN.trainNN(_pExample, _nExamples);

    Log.i(Util.TAG, "[LEARN END]");
    return true;
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/** Inputs:
 * - Image
 * - bad_boxes (Boxes far from the bounding box)
 * - variance (pEx variance)
 * Outputs
 * - Negative fern features (nFerns)
 * - Negative NN examples (nExample)
 *
 * @param frame the current frame to sample negative patches from
 * @return a Pair of (negative fern examples, negative normalised NN example patches)
 */
private Pair<List<Pair<int[], Boolean>>, List<Mat>> generateNegativeData(final Mat frame) {
    final List<Pair<int[], Boolean>> negFerns = new ArrayList<Pair<int[], Boolean>>();
    final List<Mat> negExamples = new ArrayList<Mat>();

    // NOTE(review): Arrays.asList returns a view, so the shuffle also reorders the
    // array returned by getBadBoxes() — confirm that array is a copy, not internal state.
    final List<BoundingBox> badBoxes = Arrays.asList(_grid.getBadBoxes());
    Collections.shuffle(badBoxes);
    Log.w(Util.TAG, "ST");
    // Get Fern Features of the boxes with big variance (calculated using integral images)
    for (BoundingBox badBox : badBoxes) {
        // half the positive-variance threshold: only "textured" bad boxes become negatives
        if (Util.getVar(badBox, _iisumJava, _iisqsumJava, _iiCols) >= _var * 0.5f) {
            final Mat patch = frame.submat(badBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, badBox.scaleIdx);
            negFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    // select a hard coded number of negative examples
    // (iteration restarts from the head of the shuffled list, so a box may contribute
    // both a fern example above and an NN example here)
    Iterator<BoundingBox> bbIt = badBoxes.iterator();
    for (int i = 0; i < _params.num_bad_patches && bbIt.hasNext(); i++) {
        final Mat pattern = new Mat();
        final Mat patch = frame.submat(bbIt.next());
        resizeZeroMeanStdev(patch, pattern, _params.patch_size);
        negExamples.add(pattern);
    }

    Log.i(Util.TAG, "Negative examples generated. Ferns count: " + negFerns.size() + ". negEx count: "
            + negExamples.size());

    return new Pair<List<Pair<int[], Boolean>>, List<Mat>>(negFerns, negExamples);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Generate Positive data
 * Inputs:
 * - good_boxes
 * - best_box
 * - bbhull
 * Outputs:
 * - Positive fern features (pFerns)
 * - Positive NN examples (pExample)
 *
 * @param frame    current frame
 * @param numWarps number of warped variants to generate (the first iteration uses the
 *                 unwarped image)
 * @param aGrid    grid supplying the best box, good boxes and their bounding hull
 */
void generatePositiveData(final Mat frame, final int numWarps, final Grid aGrid) {
    // The single positive NN example: normalised patch of the best box
    resizeZeroMeanStdev(frame.submat(aGrid.getBestBox()), _pExample, _params.patch_size);
    //Get Fern features on warped patches
    final Mat img = new Mat();
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);
    final BoundingBox bbhull = aGrid.getBBhull();
    final Mat warped = img.submat(bbhull);
    // centre of the hull
    final Point pt = new Point(bbhull.x + (bbhull.width - 1) * 0.5f, bbhull.y + (bbhull.height - 1) * 0.5f);

    _pFerns.clear();
    _pPatterns.clear();

    for (int i = 0; i < numWarps; i++) {
        if (i > 0) {
            // this is important as it introduces the necessary noise / fuziness in the initial examples such that the Fern classifier recognises similar shapes not only Exact ones !
            // warped is a reference to a subset of the img data, so this will affect the img object
            _patchGenerator.generate(frame, pt, warped, bbhull.size(), _rng);
        }

        // Every good box yields one positive fern example per warp iteration
        final BoundingBox[] goodBoxes = aGrid.getGoodBoxes();
        for (BoundingBox goodBox : goodBoxes) {
            final Mat patch = img.submat(goodBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, goodBox.scaleIdx);
            _pFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, true));

            //            // this will be used for display only
            //            final Mat tempPattern = new Mat();
            //            Imgproc.resize(patch, tempPattern, new Size(_params.patch_size, _params.patch_size));
            //            _pPatterns.add(tempPattern);
        }
    }

    Log.i(Util.TAG, "Positive examples generated( ferns: " + _pFerns.size() + " NN: 1/n )");
}

From source file:com.wallerlab.compcellscope.MultiModeViewActivity.java

License:BSD License

/**
 * Composes the four imaging modes into a single 2x2 multi-mode preview grid.
 * Each source Mat is resized (in place) to the common size {@code sz}, downsampled
 * by a factor of 2, labelled, and blitted into its quadrant of {@code gridOut}.
 *
 * @param gridOut destination frame; its quadrants (TLRect/TRRect/BLRect/BRRect) are overwritten
 * @param MatTL   top-left source (becomes "BrightField")
 * @param MatTR   top-right source (becomes "DarkField")
 * @param MatBL   bottom-left source (becomes "DPC-LR")
 * @param MatBR   bottom-right source (becomes "DPC-TB")
 * @return gridOut, for chaining
 */
public Mat generateMMFrame(Mat gridOut, Mat MatTL, Mat MatTR, Mat MatBL, Mat MatBR) {
    final Mat dpcLR = new Mat(MatTL.size(), MatTL.type());
    final Mat dpcTB = new Mat(MatTR.size(), MatTR.type());
    final Mat brightField = new Mat(MatBL.size(), MatBL.type());
    final Mat darkField = new Mat(MatBR.size(), MatBR.type());

    // The pyramid operation below changes dimensions, so force every source
    // back to the common size first (note: mutates the callers' Mats).
    Imgproc.resize(MatTL, MatTL, sz);
    Imgproc.resize(MatTR, MatTR, sz);
    Imgproc.resize(MatBL, MatBL, sz);
    Imgproc.resize(MatBR, MatBR, sz);

    // Downsample by 2 so four images tile a 2x2 grid of the original size.
    Imgproc.pyrDown(MatBL, dpcLR);
    Imgproc.pyrDown(MatBR, dpcTB);
    Imgproc.pyrDown(MatTL, brightField);
    Imgproc.pyrDown(MatTR, darkField);

    // Caption each quadrant.
    Core.putText(dpcLR, "DPC-LR", new Point(43, 40), Core.FONT_ITALIC, 1, new Scalar(255, 255, 0));
    Core.putText(dpcTB, "DPC-TB", new Point(43, 40), Core.FONT_ITALIC, 1, new Scalar(255, 255, 0));
    Core.putText(brightField, "BrightField", new Point(33, 40), Core.FONT_ITALIC, 1, new Scalar(255, 255, 0));
    Core.putText(darkField, "DarkField", new Point(37, 40), Core.FONT_ITALIC, 1, new Scalar(255, 255, 0));

    // Blit into the output quadrants (submat returns a view into gridOut's data).
    dpcLR.copyTo(gridOut.submat(BLRect));
    dpcTB.copyTo(gridOut.submat(BRRect));
    brightField.copyTo(gridOut.submat(TLRect));
    darkField.copyTo(gridOut.submat(TRRect));

    // Free the native buffers of the temporaries.
    dpcLR.release();
    dpcTB.release();
    brightField.release();
    darkField.release();

    return gridOut;
}

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Trims the image horizontally, removing {@code 2 * trimSize} pixels from each side
 * (4 * trimSize total). The returned Mat is a submat view — it shares pixel data
 * with the input.
 *
 * @param image    source image
 * @param trimSize number of pixels (times two) to trim from each side; must be >= 0
 *                 and small enough to leave a positive width
 * @return the horizontally cropped view of {@code image}
 * @throws IllegalArgumentException when trimSize is negative or trimming would leave
 *                                  no columns (previously this surfaced as an opaque
 *                                  native CvException from submat)
 */
public static Mat cropImageHorizontal(Mat image, int trimSize) {
    final int croppedWidth = image.width() - 4 * trimSize;
    if (trimSize < 0 || croppedWidth <= 0) {
        throw new IllegalArgumentException(
                "trimSize " + trimSize + " is invalid for image width " + image.width());
    }

    final Rect roi = new Rect(2 * trimSize, 0, croppedWidth, image.height());
    return image.submat(roi);
}

From source file:imageanalysis.Analyzer.java

/**
 * Builds a difference map between the two halves of the clipboard image:
 * regions that differ between the halves light up, identical regions stay dark.
 *
 * @return an 8-bit, contrast-stretched image highlighting the differing objects
 */
private Mat findDifferences() {
    final Mat image = ImgTools.getImageFromClipboard();

    // The two halves to compare (submat returns views — no pixel copy)
    final Mat halfA = image.submat(left);
    final Mat halfB = image.submat(right);

    // Saturated subtraction in both directions catches differences of either sign
    final Mat aMinusB = new Mat();
    final Mat bMinusA = new Mat();
    Core.subtract(halfA, halfB, aMinusB);
    Core.subtract(halfB, halfA, bMinusA);

    // Combine both directions into a single highlight image
    final Mat highlight = new Mat(aMinusB.size(), CvType.CV_32F);
    Core.add(aMinusB, bMinusA, highlight);

    // Stretch to the full 8-bit range for display
    Core.normalize(highlight, highlight, 0, 255, Core.NORM_MINMAX);
    highlight.convertTo(highlight, CvType.CV_8U);

    return highlight;
}

From source file:javaapplication1.Ocv.java

/**
 * Detects faces in the input image and converts each detected face region to
 * grayscale (rendered back into the colour image), then writes the result.
 *
 * BUG FIX: the {@code filter}/{@code input}/{@code output} parameters were previously
 * ignored in favour of same-named fields, and two {@code File} objects were created
 * but never used. The parameters are now honoured.
 *
 * @param filter path to the cascade classifier XML file
 * @param input  path of the image to process
 * @param output path where the processed image is written
 */
public void makeFacesGray(String filter, String input, String output) {
    // create a classifier from the supplied cascade file
    final CascadeClassifier faceDetector = new CascadeClassifier(filter);

    // load the image and read it into a matrix
    final Mat image = Highgui.imread(input);

    // run a face detector on the image
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // inform about faces detected
    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    // make each face gray
    for (Rect rect : faceDetections.toArray()) {
        // get a shallow copy of the submatrix for the face
        Mat sub = image.submat(rect);
        // convert it to gray, then back to BGR; cvtColor reallocates the destination
        // when the channel count changes, which detaches `sub` from `image`...
        Imgproc.cvtColor(sub, sub, Imgproc.COLOR_BGR2GRAY, 1);
        Imgproc.cvtColor(sub, sub, Imgproc.COLOR_GRAY2BGR, 3);
        // ...so an explicit copy back into the original image is required
        sub.copyTo(image.submat(rect));
    }

    // save file
    Highgui.imwrite(output, image);
}

From source file:logic.helpclass.Util.java

/**
 * Track template within the image/*  w  w w  .j a va  2s. c om*/
 * @param grayFrame
 * @param rect
 * @param temp
 * @return 
 */
static public Rect trackTemplate(Mat grayFrame, Rect rect, Mat temp) {
    Rect searchRect = new Rect(new Point(rect.x - rect.width / 2, rect.y - rect.height / 2),
            new Point(rect.x + rect.width * 3 / 2, rect.y + rect.height * 3 / 2));

    Mat dst = new Mat(searchRect.width - temp.width() + 1, searchRect.height - temp.height() + 1, CV_32FC1);

    if ((searchRect.x < 0 || searchRect.y < 0) || (searchRect.x + searchRect.width > grayFrame.cols()
            || searchRect.y + searchRect.height > grayFrame.rows()))
        return null;

    Imgproc.matchTemplate(grayFrame.submat(searchRect), temp, dst, Imgproc.TM_SQDIFF_NORMED);

    Core.MinMaxLocResult result = Core.minMaxLoc(dst);

    //check new location: if coordinates change so variously, remain previous location
    if (true) {
        rect.x = (int) (searchRect.x + result.minLoc.x);
        rect.y = (int) (searchRect.y + result.minLoc.y);
        return rect;
    } else {
        return null;
    }
}

From source file:net.bsrc.cbod.opencv.OpenCV.java

/**
 * @param imgPath/*  w w  w.  j a  v a2s.c  om*/
 * @param box
 * @return
 */
public static Mat getImageMat(String imgPath, PascalBndBox box) {

    Mat result = null;
    Mat org = getImageMat(imgPath);

    Point[] arr = new Point[] { new Point(box.getXmin(), box.getYmin()),
            new Point(box.getXmin(), box.getYmax() - 1), new Point(box.getXmax() - 1, box.getYmin()),
            new Point(box.getXmax() - 1, box.getYmax() - 1) };
    try {
        Rect r = Imgproc.boundingRect(new MatOfPoint(arr));
        result = org.submat(r);

    } catch (CvException ex) {
        logger.error("", ex);
    }

    return result;
}