Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

On this page you can find example usage for the org.opencv.core Mat default constructor, Mat().

Prototype

public Mat() 
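
The default constructor creates an empty Mat with no data; in the examples below it is almost always handed to an OpenCV call that allocates and fills it as an output argument. The following minimal sketch of that pattern is not taken from any of the projects listed under Usage (the MatExample class name is only for illustration):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MatExample {
    public static void main(String[] args) {
        // Load the native OpenCV library before creating any Mat
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // A default-constructed Mat is empty until an OpenCV call fills it
        Mat gray = new Mat();

        // A small 3-channel test image filled with a single color
        Mat source = new Mat(100, 100, CvType.CV_8UC3, new Scalar(40, 80, 120));

        // cvtColor allocates gray to the required size and type
        Imgproc.cvtColor(source, gray, Imgproc.COLOR_BGR2GRAY);

        System.out.println("rows=" + gray.rows() + " cols=" + gray.cols()
                + " channels=" + gray.channels());
    }
}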

Usage

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

@Override
public Mat applyMathMorphology(Integer radius) {
    Mat dest = new Mat();
    int instrumentSize = radius * 2 + 1;
    Mat kernel = getStructuringElement(Imgproc.CV_SHAPE_ELLIPSE, new Size(instrumentSize, instrumentSize),
            new Point(radius, radius));

    Imgproc.morphologyEx(currentImage, dest, MORPH_CLOSE, kernel, new Point(-1, -1), 1);

    dest.copyTo(currentImage);
    dest.release();
    return currentImage;
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void detectAstrocytesOld(Mat source, Integer averageRectSize, Double averageArea, int intensity) {
    if (source.channels() == 3) {
        source = CoreOperations.grayscale(source);
    }

    astrocytesCenters = new ArrayList<>();
    List<MatOfPoint> contoursAfterFirstIteration = new ArrayList<>();
    Mat hierarchy = new Mat();

    /* Step 1 */
    findContours(source, contoursAfterFirstIteration, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contoursAfterFirstIteration) {
        Rect boundingRectangle = boundingRect(contour);
        Double contourArea = contourArea(contour);
        Double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        /* Step 2 */
        if (averageArea - 160 <= contourArea /*&& contourArea <= averageArea + 10*/) {
            /* Step 3 */
            if (((averageRectSize - 15 <= boundingRectangle.width)
                    && (boundingRectangle.width <= averageRectSize + 15)
                    || (averageRectSize - 15 <= boundingRectangle.height)
                            && (boundingRectangle.height <= averageRectSize + 15))
                    && (boundingRectangle.width / (float) boundingRectangle.height < 1.8f)
                    && (boundingRectangle.height / (float) boundingRectangle.width < 1.8f)) {
                /* Step 4 */
                if (contourArea / (contourPerimeter * contourPerimeter) > 0.05
                        && contourArea / (contourPerimeter * contourPerimeter) < 0.30) {
                    int averageIntensityWithinContour = CoreOperations.averageIntensity(sourceImage, contour);

                    /* Step 5 */
                    if (averageIntensityWithinContour <= intensity + 20) {
                        int xCoordOfAstrocyteCenter = (int) boundingRectangle.tl().x
                                + boundingRectangle.width / 2;
                        int yCoordOfAstrocyteCenter = (int) boundingRectangle.tl().y
                                + boundingRectangle.height / 2;
                        astrocytesCenters.add(new Point(xCoordOfAstrocyteCenter, yCoordOfAstrocyteCenter));
                    }
                }
            }
        }
    }
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private List<Neuron> findNeuronsInStep(Mat source, int stepRadius) {
    List<Neuron> neurons = new ArrayList<Neuron>();

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    findContours(CoreOperations.grayscale(source), contours, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contours) {
        Rect boundingRectangle = boundingRect(contour);
        int xCenter = boundingRectangle.x + boundingRectangle.width / 2;
        int yCenter = boundingRectangle.y + boundingRectangle.height / 2;
        neurons.add(new Neuron(new Point(xCenter, yCenter), stepRadius));
    }

    return neurons;
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private void findAstrocytes(Mat src) {
    astrocytesCenters = new ArrayList<Point>();

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    findContours(CoreOperations.grayscale(src), contours, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contours) {
        Rect boundingRectangle = boundingRect(contour);
        Double contourArea = contourArea(contour);
        Double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

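        // Keep roughly round contours: area / perimeter^2 is about 1 / (4 * PI), i.e. ~0.08, for an ideal circle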
        if (contourArea / (contourPerimeter * contourPerimeter) > 0.05
                && contourArea / (contourPerimeter * contourPerimeter) < 0.30) {
            int xCenter = boundingRectangle.x + boundingRectangle.width / 2;
            int yCenter = boundingRectangle.y + boundingRectangle.height / 2;
            astrocytesCenters.add(new Point(xCenter, yCenter));
        }
    }
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

private Mat applyKmeans(Mat source) {
    Mat dest = new Mat();

    source.convertTo(source, CvType.CV_32F, 1.0 / 255.0);

    Mat centers = new Mat();
    Mat labels = new Mat();
    TermCriteria criteria = new TermCriteria(TermCriteria.COUNT, 20, 0.1);
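    // Cluster the float samples into 4 groups, with 10 attempts and k-means++ initialization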
    Core.kmeans(source, 4, labels, criteria, 10, Core.KMEANS_PP_CENTERS, centers);

    List<Mat> mats = showClusters(source, labels, centers);
    //mats.get(0).convertTo(dest, CvType.CV_8UC3);
    Core.merge(mats, dest);
    //centers.convertTo(dest, CvType.CV_8UC3);
    return dest;
}

From source file:com.blogspot.thedsweb.engine.Brightness.java

License:Open Source License

private int captureAndCalculate() {
    final Mat frame = new Mat();

    // Initialize video capturing and set a small image size
    final VideoCapture cap = new VideoCapture(0);
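    // Property ids 3 and 4 correspond to frame width and frame height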
    cap.set(3, 160);
    cap.set(4, 120);

    int meanValue = current;
    face = true;

    // Return the current value as meanValue if the camera fails to start
    if (!cap.isOpened()) {
        return meanValue;
    }

    capture10thFrame(cap, frame);

    // Calculate mean value of frame
    meanValue = meanCalculation(frame);

    // If the current value changed in an extreme way,
    // capture and calculate it again
    if (control(meanValue)) {
        capture10thFrame(cap, frame);

        // Re-Calculate mean value of frame
        meanValue = meanCalculation(frame);
    }

    // Set to true if someone's face is detected
    face = probability.detectFace(frame, meanValue);

    // Release the camera for other programs
    cap.release();

    return meanValue;
}

From source file:com.blogspot.thedsweb.engine.Brightness.java

License:Open Source License

private int meanCalculation(Mat rgb) {
    // Convert RGB to YCrCb for easier luminance calculation
    final Mat yCrCb = new Mat();
    Imgproc.cvtColor(rgb, yCrCb, Imgproc.COLOR_RGB2YCrCb);

    // Calculate luminance
    final Scalar mainMean = Core.mean(yCrCb);
    int meanLumaValue = (int) mainMean.val[0];

    // Test if backlit conditions are true
    if (!firstRun && backlitDetection(yCrCb, mainMean, meanLumaValue)) {
        Debug.LOG.log(Level.CONFIG, "Backlit detected.");
        backlit = true;
        if (meanLumaValue < current) {
            meanLumaValue = current;
        }
    } else {
        backlit = false;
    }

    // Set the first-run flag to false so that backlit detection also runs
    // the next time the mean calculation method is called
    if (firstRun) {
        firstRun = false;
    }

    // If the frame is completely black or white, the camera must already be
    // in use by another program
    if (meanLumaValue == 0) {
        meanLumaValue = current;
    }

    return meanLumaValue;
}

From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java

License:Open Source License

public static Bitmap GetBitmapFromMat(Mat mat, boolean convertColor) {
    Bitmap bitmap = Bitmap.createBitmap(mat.cols(), mat.rows(), Bitmap.Config.ARGB_8888);
    if (convertColor) {
        Mat finalColourMat = new Mat();
        Imgproc.cvtColor(mat, finalColourMat, Imgproc.COLOR_BGR2RGB);
        matToBitmap(finalColourMat, bitmap);
    } else {
        matToBitmap(mat, bitmap);
    }

    return bitmap;
}

From source file:com.carver.paul.truesight.ImageRecognition.ImageTools.java

License:Open Source License

public static Mat GetMatFromBitmap(Bitmap bitmap) {
    Mat mat = new Mat();
    bitmapToMat(bitmap, mat);
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
    return mat;
}

From source file:com.carver.paul.truesight.ImageRecognition.RecognitionModel.java

License:Open Source License

public static List<Mat> findHeroTopLinesInImage(Mat photo, List<List<Integer>> colourRanges, int lowerHsvS,
        int lowerHsvV, int upperHsvS, int upperHsvV) {
    List<Mat> linesList = new ArrayList<>();
    int pos = 0;
    int photoWidth = photo.width();

    for (List<Integer> colourRange : colourRanges) {
        int minX;
        int maxX;

        if (colourRanges.size() == 1) {
            minX = 0;
            maxX = photoWidth;
        } else {
            minX = pos * photoWidth / 6;
            maxX = (2 + pos) * photoWidth / 6;
        }

        Scalar lowerHsv = new Scalar(colourRange.get(0), lowerHsvS, lowerHsvV);
        Scalar upperHsv = new Scalar(colourRange.get(1), upperHsvS, upperHsvV);

        Mat subMat = photo.submat(0, photo.height() / 2, minX, maxX);
        Mat mask = new Mat();
        ImageTools.MaskAColourFromImage(subMat, lowerHsv, upperHsv, mask);

        Mat lines = new Mat();

        // Note: this is the part that takes the most time.
        ImageTools.getLineFromTopRectMask(mask, lines, photoWidth / 7); //USED TO BE 8!!!!

        adjustXPosOfLines(lines, minX);
        linesList.add(lines);

        //   Main.DrawMatInImageBox(mask, maskImage); // just for debug
        pos++;
    }

    return linesList;
}