Example usage for org.opencv.core Mat cols

List of usage examples for org.opencv.core Mat cols

Introduction

On this page you can find example usage for org.opencv.core Mat cols.

Prototype

public int cols() 
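
A minimal sketch of how cols() is typically paired with rows(), before the full examples below. The dimensions are arbitrary and the snippet assumes the OpenCV native library has already been loaded:

import org.opencv.core.CvType;
import org.opencv.core.Mat;

Mat img = new Mat(480, 640, CvType.CV_8UC1); // 480 rows, 640 columns, single channel
int width = img.cols();   // 640: number of columns (image width in pixels)
int height = img.rows();  // 480: number of rows (image height in pixels)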

Usage

From source file:com.trandi.opentld.tld.BoundingBox.java

License:Apache License

BoundingBox intersect(final Mat img) {
    final BoundingBox result = new BoundingBox();
    result.x = Math.max(x, 0);
    result.y = Math.max(y, 0);
    result.width = (int) Math.min(Math.min(img.cols() - x, width), Math.min(width, br().x));
    result.height = (int) Math.min(Math.min(img.rows() - y, height), Math.min(height, br().y));
    return result;
}

From source file:com.trandi.opentld.tld.FernEnsembleClassifier.java

License:Apache License

/**
 * The numbers in this array can be up to 2^params.structSize, as we shift left once for each feature.
 */
int[] getAllFernsHashCodes(final Mat patch, int scaleIdx) {
    final int[] result = new int[ferns.length];
    final byte[] imageData = Util.getByteArray(patch);
    final int cols = patch.cols();
    for (int fern = 0; fern < ferns.length; fern++) {
        result[fern] = ferns[fern].calculateHashCode(scaleIdx, imageData, cols);
    }

    return result;
}

From source file:com.trandi.opentld.tld.Grid.java

License:Apache License

Grid(Mat img, Rect trackedBox, int minWinSide) {
    // TODO why do we generate so many BAD boxes, only to remove them later on!?
    // OR do we need them to re-assess which ones are bad later on?
    for (int s = 0; s < SCALES.length; s++) {
        final int width = Math.round(trackedBox.width * SCALES[s]);
        final int height = Math.round(trackedBox.height * SCALES[s]);
        final int minBbSide = Math.min(height, width);

        // continue ONLY if the future box is "reasonable": bigger than the min window and smaller than the full image !
        if (minBbSide >= minWinSide && width <= img.cols() && height <= img.rows()) {
            trackedBoxScales.add(new Size(width, height));
            final int shift = Math.round(SHIFT * minBbSide);

            for (int row = 1; row < (img.rows() - height); row += shift) {
                for (int col = 1; col < (img.cols() - width); col += shift) {
                    final BoundingBox bbox = new BoundingBox();
                    bbox.x = col;
                    bbox.y = row;
                    bbox.width = width;
                    bbox.height = height;
                    bbox.scaleIdx = trackedBoxScales.size() - 1; // currently last one in this list

                    grid.add(bbox);
                }
            }
        }
    }
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

public void init(Mat frame1, Rect trackedBox) {
    // get Bounding boxes
    if (Math.min(trackedBox.width, trackedBox.height) < _params.min_win) {
        throw new IllegalArgumentException(
                "Provided trackedBox: " + trackedBox + " is too small (min " + _params.min_win + ")");
    }
    _grid = new Grid(frame1, trackedBox, _params.min_win);
    Log.i(Util.TAG, "Init Created " + _grid.getSize() + " bounding boxes.");
    _grid.updateGoodBadBoxes(trackedBox, _params.num_closest_init);

    _iiRows = frame1.rows();
    _iiCols = frame1.cols();
    _iisum.create(_iiRows, _iiCols, CvType.CV_32F);
    _iisqsum.create(_iiRows, _iiCols, CvType.CV_64F);

    // correct bounding box
    _lastbox = _grid.getBestBox();

    _classifierFern.init(_grid.getTrackedBoxScales(), _rng);

    // generate DATA
    // generate POSITIVE DATA
    generatePositiveData(frame1, _params.num_warps_init, _grid);

    // Set variance threshold
    MatOfDouble stddev = new MatOfDouble();
    Core.meanStdDev(frame1.submat(_grid.getBestBox()), new MatOfDouble(), stddev);
    updateIntegralImgs(frame1);
    // this is directly half of the variance of the initial box, which will be used in the 1st stage of the classifier
    _var = (float) Math.pow(stddev.toArray()[0], 2d) * 0.5f;
    // check variance
    final double checkVar = Util.getVar(_grid.getBestBox(), _iisumJava, _iisqsumJava, _iiCols) * 0.5;
    Log.i(Util.TAG, "Variance: " + _var + " / Check variance: " + checkVar);

    // generate NEGATIVE DATA
    final Pair<List<Pair<int[], Boolean>>, List<Mat>> negData = generateNegativeData(frame1);

    // Split Negative Ferns <features, labels=false> into Training and Testing sets (they are already shuffled)
    final int nFernsSize = negData.first.size();
    final List<Pair<int[], Boolean>> nFernsTest = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(0, nFernsSize / 2));
    final List<Pair<int[], Boolean>> nFerns = new ArrayList<Pair<int[], Boolean>>(
            negData.first.subList(nFernsSize / 2, nFernsSize));

    // Split Negative NN Examples into Training and Testing sets
    final int nExSize = negData.second.size();
    final List<Mat> nExamplesTest = new ArrayList<Mat>(negData.second.subList(0, nExSize / 2));
    _nExamples = new ArrayList<Mat>(negData.second.subList(nExSize / 2, nExSize));

    //MERGE Negative Data with Positive Data and shuffle it
    final List<Pair<int[], Boolean>> fernsData = new ArrayList<Pair<int[], Boolean>>(_pFerns);
    fernsData.addAll(nFerns);
    Collections.shuffle(fernsData);

    // TRAINING
    Log.i(Util.TAG, "Init Start Training with " + fernsData.size() + " ferns, " + _nExamples.size()
            + " nExamples, " + nFernsTest.size() + " nFernsTest, " + nExamplesTest.size() + " nExamplesTest");
    _classifierFern.trainF(fernsData, 10);
    _classifierNN.trainNN(_pExample, _nExamples);
    // Threshold evaluation on testing sets
    _classifierFern.evaluateThreshold(nFernsTest);
    _classifierNN.evaluateThreshold(nExamplesTest);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

private TrackingStruct track(final Mat lastImg, final Mat currentImg, final BoundingBox lastBox) {
    Log.i(Util.TAG, "[TRACK]");

    // Generate points
    final Point[] lastPoints = lastBox.points();
    if (lastPoints.length == 0) {
        Log.e(Util.TAG, "Points not generated from lastBox: " + lastBox);
        return null;
    }

    // Frame-to-frame tracking with forward-backward error checking
    final Pair<Point[], Point[]> trackedPoints = _tracker.track(lastImg, currentImg, lastPoints);
    if (trackedPoints == null) {
        Log.e(Util.TAG, "No points could be tracked.");
        return null;
    }
    if (_tracker.getMedianErrFB() > _params.tracker_stability_FBerrMax) {
        Log.w(Util.TAG, "TRACKER too unstable. FB Median error: " + _tracker.getMedianErrFB() + " > "
                + _params.tracker_stability_FBerrMax);
        // return null;  // we hope the detection will find the pattern again
    }

    // bounding box prediction
    final BoundingBox predictedBB = lastBox.predict(trackedPoints.first, trackedPoints.second);
    if (predictedBB.x > currentImg.cols() || predictedBB.y > currentImg.rows() || predictedBB.br().x < 1
            || predictedBB.br().y < 1) {
        Log.e(Util.TAG, "TRACKER Predicted bounding box out of range !");
        return null;
    }

    // estimate Confidence
    Mat pattern = new Mat();
    try {
        resizeZeroMeanStdev(currentImg.submat(predictedBB.intersect(currentImg)), pattern, _params.patch_size);
    } catch (Throwable t) {
        Log.e(Util.TAG, "PredBB when failed: " + predictedBB);
    }
    //Log.i(Util.TAG, "Confidence " + pattern.dump());      

    //Conservative Similarity
    final NNConfStruct nnConf = _classifierNN.nnConf(pattern);
    Log.i(Util.TAG, "Tracking confidence: " + nnConf.conservativeSimilarity);

    Log.i(Util.TAG, "[TRACK END]");
    return new TrackingStruct(nnConf.conservativeSimilarity, predictedBB, trackedPoints.first,
            trackedPoints.second);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Structure the classifier into 3 stages:
 * a) patch variance
 * b) ensemble of ferns classifier
 * c) nearest neighbour
 */
private Pair<List<DetectionStruct>, List<DetectionStruct>> detect(final Mat frame) {
    Log.i(Util.TAG, "[DETECT]");

    final List<DetectionStruct> fernClassDetected = new ArrayList<Tld.DetectionStruct>(); //dt
    final List<DetectionStruct> nnMatches = new ArrayList<Tld.DetectionStruct>(); //dbb

    // 0. Cleaning
    _boxClusterMap.clear();

    // 1. DETECTION
    final Mat img = new Mat(frame.rows(), frame.cols(), CvType.CV_8U);
    updateIntegralImgs(frame);
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);

    // Apply the Variance filter TODO : Bottleneck
    int a = 0;
    for (BoundingBox box : _grid) {
        // a) speed up by doing the features/ferns check ONLY if the variance is high enough !
        if (Util.getVar(box, _iisumJava, _iisqsumJava, _iiCols) >= _var) {
            a++;
            final Mat patch = img.submat(box);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, box.scaleIdx);
            final double averagePosterior = _classifierFern.averagePosterior(allFernsHashCodes);
            _fernDetectionNegDataForLearning.put(box, allFernsHashCodes);// store for later use in learning

            // b)
            if (averagePosterior > _classifierFern.getFernPosThreshold()) {
                fernClassDetected.add(new DetectionStruct(box, allFernsHashCodes, averagePosterior, patch));
            }
        }
    }

    Log.i(Util.TAG, a + " Bounding boxes passed the variance filter (" + _var + ")");
    Log.i(Util.TAG, fernClassDetected.size() + " Initial detected from Fern Classifier");
    if (fernClassDetected.size() == 0) {
        Log.i(Util.TAG, "[DETECT END]");
        return null;
    }

    // keep only the best
    Util.keepBestN(fernClassDetected, MAX_DETECTED, new Comparator<DetectionStruct>() {
        @Override
        public int compare(DetectionStruct detS1, DetectionStruct detS2) {
            return Double.compare(detS1.averagePosterior, detS2.averagePosterior);
        }
    });

    // 2. MATCHING using the NN classifier  c)
    for (DetectionStruct detStruct : fernClassDetected) {
        // update detStruct.patch to params.patch_size and normalise it
        Mat pattern = new Mat();
        resizeZeroMeanStdev(detStruct.patch, pattern, _params.patch_size);
        detStruct.nnConf = _classifierNN.nnConf(pattern);

        Log.i(Util.TAG, "NNConf: " + detStruct.nnConf.relativeSimilarity + " / "
                + detStruct.nnConf.conservativeSimilarity + " Threshold: " + _classifierNN.getNNThreshold());
        // only keep valid boxes
        if (detStruct.nnConf.relativeSimilarity > _classifierNN.getNNThreshold()) {
            nnMatches.add(detStruct);
        }
    }

    Log.i(Util.TAG, "[DETECT END]");
    return new Pair<List<DetectionStruct>, List<DetectionStruct>>(fernClassDetected, nnMatches);
}

From source file:com.trandi.opentld.TLDView.java

License:Apache License

private static void copyTo(List<Mat> patterns, Mat dest) {
    if (patterns == null || patterns.isEmpty() || dest == null)
        return;

    final int patternRows = patterns.get(0).rows();
    final int patternCols = patterns.get(0).cols();
    final int vertCount = dest.rows() / patternRows;
    final int horizCount = patterns.size() / vertCount + 1;

    int patchIdx = 0;
    for (int col = dest.cols() - horizCount * patternCols - 1; col < dest.cols()
            && patchIdx < patterns.size(); col += patternCols) {
        for (int row = 0; row < dest.rows() && patchIdx < patterns.size(); row += patternRows) {
            Imgproc.cvtColor(patterns.get(patchIdx),
                    dest.submat(row, row + patternRows, col, col + patternCols), Imgproc.COLOR_GRAY2RGBA);
            patchIdx++;
        }
    }
}

From source file:com.ttolley.pongbot.controller.CvPanel.java

/**
 * Converts/writes a Mat into a BufferedImage.
 *
 * @param matrix Mat of type CV_8UC3 or CV_8UC1
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 */
public BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb  
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }
    BufferedImage image2 = new BufferedImage(cols, rows, type);
    image2.getRaster().setDataElements(0, 0, cols, rows, data);
    return image2;
}

From source file:com.untref.bordes.HoughCirculos.java

public static BufferedImage implementarCiculos(BufferedImage screen, int acumulador, int radioMin,
        int radioMax) {
    Mat source = new Mat(screen.getHeight(), screen.getWidth(), CvType.CV_8UC3);
    byte[] data = ((DataBufferByte) screen.getRaster().getDataBuffer()).getData();
    source.put(0, 0, data);
    //ImageIO.write(screen, "jpg", "imagen");
    //Mat source = Highgui.imread("test.jpg", Highgui.CV_LOAD_IMAGE_COLOR);
    Mat destination = new Mat(source.rows(), source.cols(), source.type());

    Imgproc.cvtColor(source, destination, Imgproc.COLOR_RGB2GRAY);

    Imgproc.GaussianBlur(destination, destination, new Size(3, 3), 0, 0);

    Mat circles = new Mat();
    Imgproc.HoughCircles(destination, circles, Imgproc.CV_HOUGH_GRADIENT, 1, 30, 10, acumulador, radioMin,
            radioMax);

    int radius;
    org.opencv.core.Point pt;
    for (int x = 0; x < circles.cols(); x++) {
        double vCircle[] = circles.get(0, x);

        if (vCircle == null) {
            break;
        }

        pt = new org.opencv.core.Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
        radius = (int) Math.round(vCircle[2]);

        // draw the found circle
        Core.circle(source, pt, radius, new Scalar(150, 0, 0), 2);
        Core.circle(source, pt, 1, new Scalar(0, 0, 0), 2);
    }
    BufferedImage res = matToBufferedImage(source);

    return res;

}

From source file:com.untref.gui.Editar.java

public static BufferedImage matToBufferedImage(Mat matrix) {
    BufferedImage bimg = new BufferedImage(1, 1, 1);
    if (matrix != null) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        int type;
        matrix.get(0, 0, data);
        switch (matrix.channels()) {
        case 1:
            type = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            type = BufferedImage.TYPE_3BYTE_BGR;
            // bgr to rgb  
            byte b;
            for (int i = 0; i < data.length; i = i + 3) {
                b = data[i];
                data[i] = data[i + 2];
                data[i + 2] = b;
            }
            break;
        default:
            return null;
        }

        // Allocate a BufferedImage matching the Mat's size and type (bimg is a fresh local above, so this check always allocates a new image)
        if (bimg == null || bimg.getWidth() != cols || bimg.getHeight() != rows || bimg.getType() != type) {
            bimg = new BufferedImage(cols, rows, type);
        }
        bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    } else { // mat was null
        bimg = null;
    }
    return bimg;
}