Example usage for org.opencv.core Mat rows

List of usage examples for org.opencv.core Mat rows

Introduction

On this page you can find example usage for org.opencv.core Mat rows.

Prototype

public int rows() 
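
A minimal, self-contained sketch (not taken from the listings below; the class name and image path are placeholders) showing the pattern the examples share: rows() and cols() report the image dimensions and are typically used to allocate a destination Mat of the same size and type as the source.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;

public class MatRowsExample {
    public static void main(String[] args) {
        // Assumes the OpenCV native library is available on the library path
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // "input.jpg" is a placeholder path
        Mat source = Imgcodecs.imread("input.jpg");

        // rows() is the image height in pixels, cols() the width
        System.out.println("rows: " + source.rows() + ", cols: " + source.cols());

        // Common pattern from the examples below: allocate a destination of the same size and type
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        System.out.println("destination size: " + destination.size());
    }
}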

Usage

From source file:src.model.filters.ErosionFilter.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________EROSION_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________
        Mat source = Imgcodecs.imread(savePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
        // Allocate a destination image with the same size and type as the source
        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        int erosion_size = 5;

        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
                new Size(2 * erosion_size + 1, 2 * erosion_size + 1));
        Imgproc.erode(source, destination, element);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_ER_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_ER_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:src.model.filters.GaussianFilter.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________GAUSSIAN_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________

        Mat source = Imgcodecs.imread(savePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);

        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        Imgproc.GaussianBlur(source, destination, new Size(35, 35), 0);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_GA_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_GA_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:src.model.filters.GrayscaleFilter.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________GRAYSCALE_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________

        Mat source = Imgcodecs.imread(savePath);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Imgproc.cvtColor(source, destination, Imgproc.COLOR_BGR2GRAY);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_BW_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_BW_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:src.model.filters.MorphFilter.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________MORPH_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________

        int elementSize = 9;

        Mat source = Imgcodecs.imread(savePath);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT,
                new Size(elementSize * 2 + 1, elementSize * 2 + 1), new Point(elementSize, elementSize));
        Imgproc.morphologyEx(source, destination, Imgproc.MORPH_GRADIENT, element);

        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_MORPH_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_MORPH_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:src.model.filters.SobelFilter.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    System.out.println("**______________SOBEL_______________**");

    try {

        String imgInput = request.getParameter("name").toString();
        String savePath = savePath(request);
        //____________________________________  

        int kernelSize = 3;
        // Mat source = Imgcodecs.imread(folder+"\\"+imgName, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);

        Mat source = Imgcodecs.imread(savePath, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
        Mat destination = new Mat(source.rows(), source.cols(), source.type());

        Mat kernel = new Mat(kernelSize, kernelSize, CvType.CV_32F) {
            {
                put(0, 0, -3);
                put(0, 1, -3);
                put(0, 2, -3);

                put(1, 0, -3);
                put(1, 1, 0);
                put(1, 2, -3);

                put(2, 0, 5);
                put(2, 1, 5);
                put(2, 2, 5);
            }
        };

        Imgproc.filter2D(source, destination, -1, kernel);
        String output = savePath.substring(0, savePath.lastIndexOf(".")) + "_SOBEL_temp.jpg";
        imgInput = request.getParameter("name").toString();
        String imgOutput = imgInput.substring(0, imgInput.lastIndexOf(".")) + "_SOBEL_temp.jpg";
        Imgcodecs.imwrite(output, destination);

        //____________________________________
        System.out.println("output: " + output);
        System.out.println("imgOutput: " + imgOutput);

        publishImg(response, imgOutput);

    } catch (Exception e) {
        System.out.println("Error: " + e.getMessage());
    }
}

From source file:syncleus.dann.data.video.Grid.java

License:Apache License

public Grid(Mat img, Rect trackedBox, int minWinSide) {
    // TODO: why do we generate so many BAD boxes, only to remove them later on?
    // Or do we need them to re-assess which ones are bad later on?
    for (int s = 0; s < SCALES.length; s++) {
        final int width = Math.round(trackedBox.width * SCALES[s]);
        final int height = Math.round(trackedBox.height * SCALES[s]);
        final int minBbSide = Math.min(height, width);

        // continue ONLY if the future box is "reasonable": bigger than the min window and smaller than the full image
        if (minBbSide >= minWinSide && width <= img.cols() && height <= img.rows()) {
            trackedBoxScales.add(new Size(width, height));
            final int shift = Math.round(SHIFT * minBbSide);

            for (int row = 1; row < (img.rows() - height); row += shift) {
                for (int col = 1; col < (img.cols() - width); col += shift) {
                    final BoundingBox bbox = new BoundingBox();
                    bbox.x = col;
                    bbox.y = row;
                    bbox.width = width;
                    bbox.height = height;
                    bbox.scaleIdx = trackedBoxScales.size() - 1; // currently last one in this list

                    grid.add(bbox);
                }
            }
        }
    }
}

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

public void init(Mat frame1, Rect trackedBox) {
    // get Bounding boxes
    if (Math.min(trackedBox.width, trackedBox.height) < _params.min_win) {
        throw new IllegalArgumentException(
                "Provided trackedBox: " + trackedBox + " is too small (min " + _params.min_win + ")");
    }
    _grid = new Grid(frame1, trackedBox, _params.min_win);
    System.out.println("Init Created " + _grid.getSize() + " bounding boxes.");
    _grid.updateGoodBadBoxes(trackedBox, _params.num_closest_init);

    _iiRows = frame1.rows();
    _iiCols = frame1.cols();
    _iisum.create(_iiRows, _iiCols, CvType.CV_32F);
    _iisqsum.create(_iiRows, _iiCols, CvType.CV_64F);

    // correct bounding box
    _lastbox = _grid.getBestBox();

    _classifierFern.init(_grid.getTrackedBoxScales(), _rng);

    // generate DATA
    // generate POSITIVE DATA
    generatePositiveData(frame1, _params.num_warps_init, _grid);

    // Set variance threshold
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame1.submat(_grid.getBestBox()), new MatOfDouble(), stddev);
    updateIntegralImgs(frame1);
    // this is directly half of the variance of the initial box, which will be used in the 1st stage of the classifier
    _var = (float) Math.pow(stddev.toArray()[0], 2d) * 0.5f;
    // check variance
    final double checkVar = TLDUtil.getVar(_grid.getBestBox(), _iisumJava, _iisqsumJava, _iiCols) * 0.5;
    System.out.println("Variance: " + _var + " / Check variance: " + checkVar);

    // generate NEGATIVE DATA
    final Pair<List<Pair<int[], Boolean>>, List<Mat>> negData = generateNegativeData(frame1);

    // Split Negative Ferns <features, labels=false> into Training and Testing sets (they are already shuffled)
    final int nFernsSize = negData.first.size();
    final List<Pair<int[], Boolean>> nFernsTest = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(0, nFernsSize / 2));
    final List<Pair<int[], Boolean>> nFerns = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(nFernsSize / 2, nFernsSize));

    // Split Negative NN Examples into Training and Testing sets
    final int nExSize = negData.second.size();
    final List<Mat> nExamplesTest = new ArrayList<Mat>(negData.second.subList(0, nExSize / 2));
    _nExamples = new ArrayList<Mat>(negData.second.subList(nExSize / 2, nExSize));

    //MERGE Negative Data with Positive Data and shuffle it
    final List<Pair<int[], Boolean>> fernsData = new ArrayList<Pair<int[], Boolean>>(_pFerns);
    fernsData.addAll(nFerns);
    Collections.shuffle(fernsData);

    // TRAINING
    System.out.println("Init Start Training with " + fernsData.size() + " ferns, " + _nExamples.size()
            + " nExamples, " + nFernsTest.size() + " nFernsTest, " + nExamplesTest.size() + " nExamplesTest");
    _classifierFern.trainF(fernsData, 10);
    _classifierNN.trainNN(_pExample, _nExamples);
    // Threshold evaluation on testing sets
    _classifierFern.evaluateThreshold(nFernsTest);
    _classifierNN.evaluateThreshold(nExamplesTest);
}

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

private TrackingStruct track(final Mat lastImg, final Mat currentImg, final BoundingBox lastBox) {
    System.out.println("[TRACK]");

    // Generate points
    final Point[] lastPoints = lastBox.points();
    if (lastPoints.length == 0) {
        System.out.println("Points not generated from lastBox: " + lastBox);
        return null;
    }

    // Frame-to-frame tracking with forward-backward error checking
    final Pair<Point[], Point[]> trackedPoints = _tracker.track(lastImg, currentImg, lastPoints);
    if (trackedPoints == null) {
        System.out.println("No points could be tracked.");
        return null;
    }
    if (_tracker.getMedianErrFB() > _params.tracker_stability_FBerrMax) {
        System.out.println("TRACKER too unstable. FB Median error: " + _tracker.getMedianErrFB() + " > "
                + _params.tracker_stability_FBerrMax);
        // return null;  // we hope the detection will find the pattern again
    }

    // bounding box prediction
    final BoundingBox predictedBB = lastBox.predict(trackedPoints.first, trackedPoints.second);
    if (predictedBB.x > currentImg.cols() || predictedBB.y > currentImg.rows() || predictedBB.br().x < 1
            || predictedBB.br().y < 1) {
        System.out.println("TRACKER Predicted bounding box out of range !");
        return null;
    }

    // estimate Confidence
    Mat pattern = new Mat();
    try {
        resizeZeroMeanStdev(currentImg.submat(predictedBB.intersect(currentImg)), pattern, _params.patch_size);
    } catch (Throwable t) {
        System.out.println("PredBB when failed: " + predictedBB);
    }
    //System.out.println("Confidence " + pattern.dump());      

    //Conservative Similarity
    final NNConfStruct nnConf = _classifierNN.nnConf(pattern);
    System.out.println("Tracking confidence: " + nnConf.conservativeSimilarity);

    System.out.println("[TRACK END]");
    return new TrackingStruct(nnConf.conservativeSimilarity, predictedBB, trackedPoints.first,
            trackedPoints.second);
}

From source file:syncleus.dann.data.video.Tld.java

License:Apache License

/**
 * Structure the classifier into 3 stages:
 * a) patch variance
 * b) ensemble of ferns classifier
 * c) nearest neighbour
 */
private Pair<List<DetectionStruct>, List<DetectionStruct>> detect(final Mat frame) {
    System.out.println("[DETECT]");

    final List<DetectionStruct> fernClassDetected = new ArrayList<Tld.DetectionStruct>(); //dt
    final List<DetectionStruct> nnMatches = new ArrayList<Tld.DetectionStruct>(); //dbb

    // 0. Cleaning
    _boxClusterMap.clear();

    // 1. DETECTION
    final Mat img = new Mat(frame.rows(), frame.cols(), CvType.CV_8U);
    updateIntegralImgs(frame);
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);

    // Apply the Variance filter TODO : Bottleneck
    int a = 0;
    for (BoundingBox box : _grid) {
        // a) speed up by doing the features/ferns check ONLY if the variance is high enough !
        if (TLDUtil.getVar(box, _iisumJava, _iisqsumJava, _iiCols) >= _var) {
            a++;
            final Mat patch = img.submat(box);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, box.scaleIdx);
            final double averagePosterior = _classifierFern.averagePosterior(allFernsHashCodes);
            _fernDetectionNegDataForLearning.put(box, allFernsHashCodes);// store for later use in learning

            // b)
            if (averagePosterior > _classifierFern.getFernPosThreshold()) {
                fernClassDetected.add(new DetectionStruct(box, allFernsHashCodes, averagePosterior, patch));
            }
        }
    }

    System.out.println(a + " Bounding boxes passed the variance filter (" + _var + ")");
    System.out.println(fernClassDetected.size() + " Initial detected from Fern Classifier");
    if (fernClassDetected.size() == 0) {
        System.out.println("[DETECT END]");
        return null;
    }

    // keep only the best
    TLDUtil.keepBestN(fernClassDetected, MAX_DETECTED, new Comparator<DetectionStruct>() {
        @Override
        public int compare(DetectionStruct detS1, DetectionStruct detS2) {
            return Double.compare(detS1.averagePosterior, detS2.averagePosterior);
        }
    });

    // 2. MATCHING using the NN classifier  c)
    for (DetectionStruct detStruct : fernClassDetected) {
        // update detStruct.patch to params.patch_size and normalise it
        Mat pattern = new Mat();
        resizeZeroMeanStdev(detStruct.patch, pattern, _params.patch_size);
        detStruct.nnConf = _classifierNN.nnConf(pattern);

        System.out.println("NNConf: " + detStruct.nnConf.relativeSimilarity + " / "
                + detStruct.nnConf.conservativeSimilarity + " Threshold: " + _classifierNN.getNNThreshold());
        // only keep valid boxes
        if (detStruct.nnConf.relativeSimilarity > _classifierNN.getNNThreshold()) {
            nnMatches.add(detStruct);
        }
    }

    System.out.println("[DETECT END]");
    return new Pair<List<DetectionStruct>, List<DetectionStruct>>(fernClassDetected, nnMatches);
}

From source file:syncleus.dann.data.video.TLDView.java

License:Apache License

@Override
public Mat onCameraFrame(Mat originalFrame) {
    try {
        // Image is too big and this requires too much CPU for a phone, so scale everything down...
        Imgproc.resize(originalFrame, _workingFrame, WORKING_FRAME_SIZE);
        final Size workingRatio = new Size(originalFrame.width() / WORKING_FRAME_SIZE.width,
                originalFrame.height() / WORKING_FRAME_SIZE.height);
        // useful to see what we're actually working with...
        _workingFrame.copyTo(originalFrame.submat(originalFrame.rows() - _workingFrame.rows(),
                originalFrame.rows(), 0, _workingFrame.cols()));

        if (_trackedBox != null) {
            if (_tld == null) { // run the 1st time only
                Imgproc.cvtColor(_workingFrame, _lastGray, Imgproc.COLOR_RGB2GRAY);
                _tld = new Tld(_tldProperties);
                final Rect scaledDownTrackedBox = scaleDown(_trackedBox, workingRatio);
                System.out.println("Working Ration: " + workingRatio + " / Tracking Box: " + _trackedBox
                        + " / Scaled down to: " + scaledDownTrackedBox);
                try {
                    _tld.init(_lastGray, scaledDownTrackedBox);
                } catch (Exception eInit) {
                    // start from scratch, you have to select an init box again !
                    _trackedBox = null;
                    _tld = null;
                    throw eInit; // re-throw it as it will be dealt with later
                }
            } else {
                Imgproc.cvtColor(_workingFrame, _currentGray, Imgproc.COLOR_RGB2GRAY);

                _processFrameStruct = _tld.processFrame(_lastGray, _currentGray);
                drawPoints(originalFrame, _processFrameStruct.lastPoints, workingRatio, new Scalar(255, 0, 0));
                drawPoints(originalFrame, _processFrameStruct.currentPoints, workingRatio,
                        new Scalar(0, 255, 0));
                drawBox(originalFrame, scaleUp(_processFrameStruct.currentBBox, workingRatio),
                        new Scalar(0, 0, 255));

                _currentGray.copyTo(_lastGray);

                // overlay the current positive examples on the real image (needs converting at the same time!)
                //copyTo(_tld.getPPatterns(), originalFrame);
            }
        }
    } catch (Exception e) {
        _errMessage = e.getClass().getSimpleName() + " / " + e.getMessage();
        Log.e(TLDUtil.TAG, "TLDView PROBLEM", e);
    }

    if (_errMessage != null) {
        Core.putText(originalFrame, _errMessage, new Point(0, 300), Core.FONT_HERSHEY_PLAIN, 1.3d,
                new Scalar(255, 0, 0), 2);
    }

    return originalFrame;
}