Example usage for org.opencv.core.Mat Mat()

List of usage examples for org.opencv.core.Mat Mat()

Introduction

On this page you can find example usage for the org.opencv.core.Mat default constructor, Mat().

Prototype

public Mat() 
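
Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share: a Mat created with the default constructor is empty until an OpenCV call (or put()) allocates and fills it. The class name EmptyMatDemo is illustrative only, and the library-loading line assumes the standard OpenCV Java setup.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class EmptyMatDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library before creating any Mat.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A default-constructed Mat owns no pixel data yet.
        Mat gray = new Mat();
        System.out.println("empty: " + gray.empty()); // prints "empty: true"

        // Typical pattern from the examples below: pass the empty Mat as an
        // output argument and let OpenCV allocate and fill it.
        Mat color = Mat.zeros(100, 100, CvType.CV_8UC3);
        Imgproc.cvtColor(color, gray, Imgproc.COLOR_BGR2GRAY);
        System.out.println("after cvtColor: " + gray.rows() + "x" + gray.cols());
    }
}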

Usage

From source file:com.sikulix.core.Finder.java

License:Open Source License

public boolean hasChanges(Mat base, Mat current) {
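    // Convert both frames to grayscale, take the absolute per-pixel difference, and report a change
    // only if more than IMAGE_DIFF_THRESHOLD pixels differ by more than PIXEL_DIFF_THRESHOLD.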
    int PIXEL_DIFF_THRESHOLD = 5;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat bg = new Mat();
    Mat cg = new Mat();
    Mat diff = new Mat();
    Mat tdiff = new Mat();

    Imgproc.cvtColor(base, bg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(current, cg, Imgproc.COLOR_BGR2GRAY);
    Core.absdiff(bg, cg, diff);
    Imgproc.threshold(diff, tdiff, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);
    if (Core.countNonZero(tdiff) <= IMAGE_DIFF_THRESHOLD) {
        return false;
    }

    Imgproc.threshold(diff, diff, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(diff, diff, new Mat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(diff, diff, Imgproc.MORPH_CLOSE, se);

    List<MatOfPoint> points = new ArrayList<MatOfPoint>();
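    // Despite its name, this Mat receives the hierarchy output of findContours;
    // the contours themselves go into 'points'.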
    Mat contours = new Mat();
    Imgproc.findContours(diff, points, contours, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    int n = 0;
    for (Mat pm : points) {
        log.trace("(%d) %s", n++, pm);
        printMatI(pm);
    }
    log.trace("contours: %s", contours);
    printMatI(contours);
    return true;
}

From source file:com.sikulix.core.SXElement.java

License:Open Source License

protected static Mat makeMat(BufferedImage bImg) {
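    // Convert a BufferedImage to an OpenCV Mat, remapping the raster layout of each supported
    // type to 3-channel BGR (alpha, if present, is split off and discarded).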
    Mat aMat = new Mat();
    if (bImg.getType() == BufferedImage.TYPE_INT_RGB) {
        log.trace("makeMat: INT_RGB (%dx%d)", bImg.getWidth(), bImg.getHeight());
        int[] data = ((DataBufferInt) bImg.getRaster().getDataBuffer()).getData();
        ByteBuffer byteBuffer = ByteBuffer.allocate(data.length * 4);
        IntBuffer intBuffer = byteBuffer.asIntBuffer();
        intBuffer.put(data);
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, byteBuffer.array());
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        java.util.List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        java.util.List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        //A 0 - R 1 - G 2 - B 3 -> A 0 - B 1 - G 2 - R 3
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 3, 2, 2, 3, 1));
        return oMatBGR;
    } else if (bImg.getType() == BufferedImage.TYPE_3BYTE_BGR) {
        log.error("makeMat: 3BYTE_BGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
        byte[] data = ((DataBufferByte) bImg.getRaster().getDataBuffer()).getData();
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        aMat.put(0, 0, data);
        return aMat;
    } else if (bImg.getType() == BufferedImage.TYPE_4BYTE_ABGR) {
        log.trace("makeMat: TYPE_4BYTE_ABGR (%dx%d)", bImg.getWidth(), bImg.getHeight());
        byte[] data = ((DataBufferByte) bImg.getRaster().getDataBuffer()).getData();
        aMat = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC4);
        aMat.put(0, 0, data);
        Mat oMatBGR = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC3);
        Mat oMatA = new Mat(bImg.getHeight(), bImg.getWidth(), CvType.CV_8UC1);
        java.util.List<Mat> mixIn = new ArrayList<Mat>(Arrays.asList(new Mat[] { aMat }));
        java.util.List<Mat> mixOut = new ArrayList<Mat>(Arrays.asList(new Mat[] { oMatA, oMatBGR }));
        //raster is already A 0 - B 1 - G 2 - R 3: split alpha into oMatA and B,G,R into oMatBGR
        Core.mixChannels(mixIn, mixOut, new MatOfInt(0, 0, 1, 1, 2, 2, 3, 3));
        return oMatBGR;
    } else {
        log.error("makeMat: Type not supported: %d (%dx%d)", bImg.getType(), bImg.getWidth(), bImg.getHeight());
    }
    return aMat;
}

From source file:com.sikulix.core.SXElement.java

License:Open Source License

public static BufferedImage getBufferedImage(Mat mat, String type) {
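    // 'type' is an image format extension accepted by Imgcodecs.imencode, e.g. ".png";
    // a null Mat is replaced by an empty one before encoding.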
    BufferedImage bImg = null;
    MatOfByte bytemat = new MatOfByte();
    if (SX.isNull(mat)) {
        mat = new Mat();
    }
    Imgcodecs.imencode(type, mat, bytemat);
    byte[] bytes = bytemat.toArray();
    InputStream in = new ByteArrayInputStream(bytes);
    try {
        bImg = ImageIO.read(in);
    } catch (IOException ex) {
        log.error("getBufferedImage: %s error(%s)", mat, ex.getMessage());
    }
    return bImg;
}

From source file:com.sikulix.core.Visual.java

License:Open Source License

protected byte[] getImageBytes(String dotType) {
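    // Encode the element's pixel content into the format given by dotType (e.g. ".png")
    // using the pre-3.0 Highgui.imencode API, and return the raw bytes.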
    MatOfByte bytemat = new MatOfByte();
    if (SX.isNull(content)) {
        content = new Mat();
    }
    Highgui.imencode(dotType, content, bytemat);
    return bytemat.toArray();
}

From source file:com.superbool.easylpr.model.Transformation.java

public Mat remapSmallPointstoCrop(Mat smallMat, Mat transformationMatrix) {
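    // smallMat is expected to hold 2D points (perspectiveTransform requires a floating-point Mat
    // with 2 or 3 channels); the result is the same points mapped by transformationMatrix.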
    Mat remappedMat = new Mat();

    Core.perspectiveTransform(smallMat, remappedMat, transformationMatrix);

    return remappedMat;
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

private TrackingStruct track(final Mat lastImg, final Mat currentImg, final BoundingBox lastBox) {
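    // One tracking step of the TLD pipeline: track points from the previous frame into the current
    // one, check the forward-backward error (only a warning if unstable), predict the new bounding
    // box, and score it with the nearest-neighbour classifier's conservative similarity.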
    Log.i(Util.TAG, "[TRACK]");

    // Generate points
    final Point[] lastPoints = lastBox.points();
    if (lastPoints.length == 0) {
        Log.e(Util.TAG, "Points not generated from lastBox: " + lastBox);
        return null;
    }

    // Frame-to-frame tracking with forward-backward error checking
    final Pair<Point[], Point[]> trackedPoints = _tracker.track(lastImg, currentImg, lastPoints);
    if (trackedPoints == null) {
        Log.e(Util.TAG, "No points could be tracked.");
        return null;
    }
    if (_tracker.getMedianErrFB() > _params.tracker_stability_FBerrMax) {
        Log.w(Util.TAG, "TRACKER too unstable. FB Median error: " + _tracker.getMedianErrFB() + " > "
                + _params.tracker_stability_FBerrMax);
        // return null;  // we hope the detection will find the pattern again
    }

    // bounding box prediction
    final BoundingBox predictedBB = lastBox.predict(trackedPoints.first, trackedPoints.second);
    if (predictedBB.x > currentImg.cols() || predictedBB.y > currentImg.rows() || predictedBB.br().x < 1
            || predictedBB.br().y < 1) {
        Log.e(Util.TAG, "TRACKER Predicted bounding box out of range !");
        return null;
    }

    // estimate Confidence
    Mat pattern = new Mat();
    try {
        resizeZeroMeanStdev(currentImg.submat(predictedBB.intersect(currentImg)), pattern, _params.patch_size);
    } catch (Throwable t) {
        Log.e(Util.TAG, "PredBB when failed: " + predictedBB);
    }
    //Log.i(Util.TAG, "Confidence " + pattern.dump());      

    //Conservative Similarity
    final NNConfStruct nnConf = _classifierNN.nnConf(pattern);
    Log.i(Util.TAG, "Tracking confidence: " + nnConf.conservativeSimilarity);

    Log.i(Util.TAG, "[TRACK END]");
    return new TrackingStruct(nnConf.conservativeSimilarity, predictedBB, trackedPoints.first,
            trackedPoints.second);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

private boolean learn(final Mat img, final List<DetectionStruct> fernClassDetected) {
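    // Learning step: skip learning on fast appearance change, low patch variance, or patches already
    // in the negative set; otherwise regenerate positive data and retrain the fern and NN classifiers.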
    Log.i(Util.TAG, "[LEARN]");
    Mat pattern = new Mat();
    final double stdev = resizeZeroMeanStdev(img.submat(_lastbox.intersect(img)), pattern, _params.patch_size);
    final NNConfStruct confStruct = _classifierNN.nnConf(pattern);

    if (confStruct.relativeSimilarity < 0.5) {
        Log.w(Util.TAG, "Fast change, NOT learning");
        return false;
    }
    if (Math.pow(stdev, 2) < _var) {
        Log.w(Util.TAG, "Low variance, NOT learning");
        return false;
    }
    if (confStruct.isin.inNegSet) {
        Log.w(Util.TAG, "Patch in negative data, NOT learning");
        return false;
    }

    // Data generation
    _grid.updateGoodBadBoxes(_lastbox, _params.num_closest_update);
    if (_grid.getGoodBoxes().length > 0) {
        generatePositiveData(img, _params.num_warps_update, _grid);
    } else {
        Log.w(Util.TAG, "NO good boxes, NOT learning.");
        return false;
    }

    // TODO why don't we learn from the GOOD boxes too !?
    final List<Pair<int[], Boolean>> fernExamples = new ArrayList<Util.Pair<int[], Boolean>>(_pFerns);
    for (BoundingBox badBox : _grid.getBadBoxes()) {
        final int[] allFernsHashCodes = _fernDetectionNegDataForLearning.get(badBox);
        if (allFernsHashCodes != null) {
            // these are NEGATIVE examples !
            fernExamples.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    final List<Mat> nnExamples = new ArrayList<Mat>();
    if (fernClassDetected != null) {
        for (DetectionStruct detStruct : fernClassDetected) {
            if (_lastbox.calcOverlap(detStruct.detectedBB) < Grid.BAD_OVERLAP) {
                nnExamples.add(detStruct.patch);
            }
        }
    }

    // Classifiers update
    _classifierFern.trainF(fernExamples, 2);
    _classifierNN.trainNN(_pExample, _nExamples);

    Log.i(Util.TAG, "[LEARN END]");
    return true;
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/** Inputs:
 * - Image
 * - bad_boxes (Boxes far from the bounding box)
 * - variance (pEx variance)
 * Outputs
 * - Negative fern features (nFerns)
 * - Negative NN examples (nExample)
 */
private Pair<List<Pair<int[], Boolean>>, List<Mat>> generateNegativeData(final Mat frame) {
    final List<Pair<int[], Boolean>> negFerns = new ArrayList<Pair<int[], Boolean>>();
    final List<Mat> negExamples = new ArrayList<Mat>();

    final List<BoundingBox> badBoxes = Arrays.asList(_grid.getBadBoxes());
    Collections.shuffle(badBoxes);
    Log.w(Util.TAG, "ST");
    // Get Fern Features of the boxes with big variance (calculated using integral images)
    for (BoundingBox badBox : badBoxes) {
        if (Util.getVar(badBox, _iisumJava, _iisqsumJava, _iiCols) >= _var * 0.5f) {
            final Mat patch = frame.submat(badBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, badBox.scaleIdx);
            negFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, false));
        }
    }

    // select a hard coded number of negative examples
    Iterator<BoundingBox> bbIt = badBoxes.iterator();
    for (int i = 0; i < _params.num_bad_patches && bbIt.hasNext(); i++) {
        final Mat pattern = new Mat();
        final Mat patch = frame.submat(bbIt.next());
        resizeZeroMeanStdev(patch, pattern, _params.patch_size);
        negExamples.add(pattern);
    }

    Log.i(Util.TAG, "Negative examples generated. Ferns count: " + negFerns.size() + ". negEx count: "
            + negExamples.size());

    return new Pair<List<Pair<int[], Boolean>>, List<Mat>>(negFerns, negExamples);
}

From source file:com.trandi.opentld.tld.Tld.java

License:Apache License

/**
 * Generate Positive data
 * Inputs: 
 * - good_boxes 
 * - best_box 
 * - bbhull
 * Outputs: 
 * - Positive fern features (pFerns) 
 * - Positive NN examples (pExample)
 */
void generatePositiveData(final Mat frame, final int numWarps, final Grid aGrid) {
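    // Blur the frame, then repeatedly warp the patch around the hull of the good boxes
    // (iteration 0 uses the unwarped image) and collect positive fern features from every good box.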
    resizeZeroMeanStdev(frame.submat(aGrid.getBestBox()), _pExample, _params.patch_size);
    //Get Fern features on warped patches
    final Mat img = new Mat();
    Imgproc.GaussianBlur(frame, img, new Size(9, 9), 1.5);
    final BoundingBox bbhull = aGrid.getBBhull();
    final Mat warped = img.submat(bbhull);
    // centre of the hull
    final Point pt = new Point(bbhull.x + (bbhull.width - 1) * 0.5f, bbhull.y + (bbhull.height - 1) * 0.5f);

    _pFerns.clear();
    _pPatterns.clear();

    for (int i = 0; i < numWarps; i++) {
        if (i > 0) {
            // this is important as it introduces the necessary noise / fuzziness into the initial examples, so that the Fern classifier recognises similar shapes and not only exact ones
            // warped is a reference to a subset of the img data, so this will affect the img object
            _patchGenerator.generate(frame, pt, warped, bbhull.size(), _rng);
        }

        final BoundingBox[] goodBoxes = aGrid.getGoodBoxes();
        for (BoundingBox goodBox : goodBoxes) {
            final Mat patch = img.submat(goodBox);
            final int[] allFernsHashCodes = _classifierFern.getAllFernsHashCodes(patch, goodBox.scaleIdx);
            _pFerns.add(new Pair<int[], Boolean>(allFernsHashCodes, true));

            //            // this will be used for display only
            //            final Mat tempPattern = new Mat();
            //            Imgproc.resize(patch, tempPattern, new Size(_params.patch_size, _params.patch_size));
            //            _pPatterns.add(tempPattern);
        }
    }

    Log.i(Util.TAG, "Positive examples generated( ferns: " + _pFerns.size() + " NN: 1/n )");
}

From source file:com.ttolley.pongbot.opencv.CvWorker.java

@Override
protected Void doInBackground() throws Exception {
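    // Capture loop: read frames from the camera, threshold each configured HSV filter, clean the
    // mask with morphological open/close, find contours, and publish the largest target per filter.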
    try {
        //-- 2. Read the video stream  
        Mat webcam_image = new Mat();

        if (capture.isOpened()) {
            while (true) {
                capture.read(webcam_image);
                if (!webcam_image.empty()) {
                    PublishObject publishObject = new PublishObject(webcam_image);
                    for (Map.Entry<FilterType, Filter> entry : filters.entrySet()) {
                        Mat hsv_image = new Mat();
                        Mat thresholded = new Mat();
                        Filter filter = entry.getValue();
                        // One way to select a range of colors by Hue  
                        Imgproc.cvtColor(webcam_image, hsv_image, Imgproc.COLOR_BGR2HSV);
                        Core.inRange(hsv_image, filter.hsv_min, filter.hsv_max, thresholded);
                        // Morph open
                        final Size erodeSizeObj = new Size(filter.erodeSize, filter.erodeSize);
                        final Size dilateSizeObj = new Size(filter.dilateSize, filter.dilateSize);
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, erodeSizeObj));
                        // Morph close
                        Imgproc.dilate(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));
                        Imgproc.erode(thresholded, thresholded,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, dilateSizeObj));

                        Mat temp = new Mat();
                        thresholded.copyTo(temp);
                        List<MatOfPoint> contours = new ArrayList<>();
                        Mat hierarchy = new Mat();
                        Imgproc.findContours(temp, contours, hierarchy, Imgproc.RETR_TREE,
                                Imgproc.CHAIN_APPROX_SIMPLE);
                        FilteredObject.Target largestTarget = findTarget(contours, webcam_image, filter);
                        publishObject.addObject(entry.getKey(), new FilteredObject(largestTarget, thresholded));
                    }
                    publish(publishObject);

                } else {
                    System.out.println(" --(!) No captured frame -- Break!");
                    break;
                }
            }
        }
    } catch (Exception ex) {
        System.out.println("Unable to loop");
        System.out.println(getStackTrace(ex));
    }
    return null;
}