Example usage for org.opencv.core Mat channels

List of usage examples for org.opencv.core Mat channels

Introduction

On this page you can find example usages of org.opencv.core Mat.channels().

Prototype

public int channels() 

Source Link

Usage

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Converts a source color image to a gray image.
 *
 * @param src - BGR image./*from  w ww  . jav a2 s.  c om*/
 * @return gray image.
 */
public static Mat grayscale(Mat src) {
    if (src.channels() < 3)
        return src;
    Mat dest = new Mat(src.rows(), src.cols(), CvType.CV_8UC1);
    cvtColor(src, dest, COLOR_BGR2GRAY);
    return dest;
}

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Removes all small contours on a binary image with areas less than the
 * specified threshold. Erased regions take a color derived from the average
 * intensity inside the contour: bright contours are erased to black via the
 * white mask, dark contours to white via the black mask.
 *
 * @param src - binary (single-channel) source image; returned unchanged
 *              when it has more than one channel.
 * @param thresh - minimum area of contour to keep, in pixels.
 * @return a copy of the source image with all contours of area less than
 *         {@code thresh} removed.
 */
public static Mat clearContours(Mat src, int thresh) {
    if (src.channels() > 1)
        return src;

    Mat dest = src.clone();
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Fix: findContours modified its input image before OpenCV 3.2, and src
    // is read again below by averageIntensity() — give it a copy instead.
    findContours(src.clone(), contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_TC89_L1);

    Mat maskWhite = new Mat(src.rows(), src.cols(), CvType.CV_8UC1, new Scalar(255));
    Mat maskBlack = maskWhite.clone();

    for (int i = 0; i < contours.size(); i++) {
        // Primitive double: the original boxed every area into a Double.
        double contourArea = contourArea(contours.get(i));

        if (contourArea < thresh) {
            // Route the contour to the mask matching its brightness.
            int pixelColor = averageIntensity(src, contours.get(i));
            drawContours(pixelColor > 127 ? maskWhite : maskBlack, contours, i, new Scalar(0), Core.FILLED);
        }
    }

    // Slightly grow the erased regions so no contour outline survives.
    maskWhite = erode(maskWhite, 2);
    maskBlack = erode(maskBlack, 2);
    dest = and(maskWhite, dest);
    dest = or(invert(maskBlack), dest);

    return dest;
}

From source file:com.astrocytes.core.operationsengine.CoreOperations.java

License:Open Source License

/**
 * Calculates an average intensity of pixels on image within specified contour.
 * Only pixels strictly inside the contour (boundary excluded) contribute.
 *
 * @param src - source image used for calculating an average intensity.
 * @param contour - a contour which is presented as some region of interest for operation.
 * @return a level of average intensity.
 */
public static int averageIntensity(Mat src, MatOfPoint contour) {
    int averageIntensityWithinContour = 0;
    int quantityOfPixelsWithinContour = 0;
    Rect boundingRectangle = boundingRect(contour);

    // Fix: hoisted out of the scan. The original rebuilt this MatOfPoint2f
    // (copying the whole contour) once per pixel of the bounding box.
    MatOfPoint2f contour2f = new MatOfPoint2f(contour.toArray());

    for (int xCoord = (int) boundingRectangle.tl().x; xCoord <= (int) boundingRectangle.br().x; xCoord++) {
        for (int yCoord = (int) boundingRectangle.tl().y; yCoord <= (int) boundingRectangle.br().y; yCoord++) {
            // > 0 means strictly inside the contour.
            if (pointPolygonTest(contour2f, new Point(xCoord, yCoord), false) > 0) {
                averageIntensityWithinContour += intensity(src, xCoord, yCoord);
                quantityOfPixelsWithinContour++;
            }
        }
    }

    if (quantityOfPixelsWithinContour == 0) {
        // Degenerate contour with no interior pixels: fall back to the
        // bounding box's top-left pixel, inverted for single-channel images.
        quantityOfPixelsWithinContour = 1;
        averageIntensityWithinContour = intensity(src, boundingRectangle.x, boundingRectangle.y);
        if (src.channels() == 1) {
            averageIntensityWithinContour = averageIntensityWithinContour > 127 ? 0 : 255;
        }
    }

    return averageIntensityWithinContour / quantityOfPixelsWithinContour;
}

From source file:com.astrocytes.core.operationsengine.OperationsImpl.java

License:Open Source License

/**
 * Legacy astrocyte detection: filters contours of the (grayscaled) source
 * by area, bounding-box size/aspect ratio, circularity and average
 * intensity, collecting the surviving bounding-box centers into
 * {@code astrocytesCenters}.
 *
 * @param source image to search; converted to grayscale when 3-channel.
 * @param averageRectSize expected bounding-box edge length, +/- 15 px.
 * @param averageArea expected contour area; candidates may be up to 160 smaller.
 * @param intensity maximum expected average intensity, +20 tolerance.
 */
private void detectAstrocytesOld(Mat source, Integer averageRectSize, Double averageArea, int intensity) {
    if (source.channels() == 3) {
        source = CoreOperations.grayscale(source);
    }

    astrocytesCenters = new ArrayList<>();
    List<MatOfPoint> contoursAfterFirstIteration = new ArrayList<>();
    Mat hierarchy = new Mat();

    /* Step 1: find all contours. */
    findContours(source, contoursAfterFirstIteration, hierarchy, Imgproc.RETR_LIST,
            Imgproc.CHAIN_APPROX_TC89_L1);

    for (MatOfPoint contour : contoursAfterFirstIteration) {
        Rect boundingRectangle = boundingRect(contour);
        // Primitives instead of boxed Double locals: same comparisons,
        // no per-contour (un)boxing.
        double contourArea = contourArea(contour);
        double contourPerimeter = arcLength(new MatOfPoint2f(contour.toArray()), true);

        /* Step 2: area filter. */
        if (averageArea - 160 <= contourArea /*&& contourArea <= averageArea + 10*/) {
            /* Step 3: bounding-box size and aspect-ratio filter. */
            if (((averageRectSize - 15 <= boundingRectangle.width)
                    && (boundingRectangle.width <= averageRectSize + 15)
                    || (averageRectSize - 15 <= boundingRectangle.height)
                            && (boundingRectangle.height <= averageRectSize + 15))
                    && (boundingRectangle.width / (float) boundingRectangle.height < 1.8f)
                    && (boundingRectangle.height / (float) boundingRectangle.width < 1.8f)) {
                /* Step 4: circularity filter. area/perimeter^2 is scale-free
                   (a perfect circle scores 1/(4*pi) ~ 0.0796); computed once
                   instead of twice as in the original. */
                double circularity = contourArea / (contourPerimeter * contourPerimeter);
                if (circularity > 0.05 && circularity < 0.30) {
                    int averageIntensityWithinContour = CoreOperations.averageIntensity(sourceImage, contour);

                    /* Step 5: intensity filter; record the bounding-box center. */
                    if (averageIntensityWithinContour <= intensity + 20) {
                        int xCoordOfAstrocyteCenter = (int) boundingRectangle.tl().x
                                + boundingRectangle.width / 2;
                        int yCoordOfAstrocyteCenter = (int) boundingRectangle.tl().y
                                + boundingRectangle.height / 2;
                        astrocytesCenters.add(new Point(xCoordOfAstrocyteCenter, yCoordOfAstrocyteCenter));
                    }
                }
            }
        }
    }
}

From source file:com.example.afs.makingmusic.process.ImageGenerator.java

License:Open Source License

/**
 * Copies a matrix's pixel bytes into a new {@link BufferedImage}.
 *
 * Fix: the image type is now chosen from the channel count. The original
 * always used TYPE_3BYTE_BGR, so a single-channel matrix filled only a
 * third of the image buffer and rendered garbage.
 *
 * @param matrix source matrix (1-channel gray or 3-channel BGR).
 * @return the converted image.
 */
private BufferedImage toBufferedImage(Mat matrix) {
    int width = matrix.cols();
    int height = matrix.rows();
    int channels = matrix.channels();
    int bitmapSize = height * width * channels;
    byte[] bitmap = new byte[bitmapSize];
    matrix.get(0, 0, bitmap);
    // Match the raster layout to the source: gray for 1 channel, BGR otherwise.
    int type = (channels == 1) ? BufferedImage.TYPE_BYTE_GRAY : BufferedImage.TYPE_3BYTE_BGR;
    BufferedImage image = new BufferedImage(width, height, type);
    WritableRaster raster = image.getRaster();
    DataBufferByte dataBuffer = (DataBufferByte) raster.getDataBuffer();
    System.arraycopy(bitmap, 0, dataBuffer.getData(), 0, bitmap.length);
    return image;
}

From source file:com.example.yannic.remotefacedetection.agent.FaceDetectionAgent.java

License:Open Source License

/**
 * Converts an OpenCV matrix into a {@link BufferedImage}, reusing the
 * supplied image when its dimensions and type already match.
 *
 * @param matrix source matrix; only 1-channel (gray) and 3-channel (BGR)
 *               matrices are supported.
 * @param bimg   an existing image to reuse when compatible; may be null.
 * @return the converted image, or {@code null} when {@code matrix} is null
 *         or has an unsupported channel count.
 */
public static BufferedImage matToBufferedImage(Mat matrix, BufferedImage bimg) {
    if (matrix == null) {
        return null;
    }
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    matrix.get(0, 0, data);

    int type;
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // Swap blue and red components in place: BGR -> RGB.
        for (int i = 0; i < data.length; i += 3) {
            byte tmp = data[i];
            data[i] = data[i + 2];
            data[i + 2] = tmp;
        }
        break;
    default:
        return null;
    }

    boolean reusable = bimg != null && bimg.getWidth() == cols && bimg.getHeight() == rows
            && bimg.getType() == type;
    if (!reusable) {
        bimg = new BufferedImage(cols, rows, type);
    }
    bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    return bimg;
}

From source file:com.github.mbillingr.correlationcheck.ImageProcessor.java

License:Open Source License

/**
 * Renders an OpenCV matrix as an Android bitmap, rotating it 90 degrees
 * clockwise (transpose followed by a horizontal flip) in the process.
 *
 * @param input source matrix, gray or BGR; may be null.
 * @return the converted bitmap, or an empty 0x0 bitmap when input is null.
 */
Bitmap matToBitmap(Mat input) {
    if (input == null) {
        return Bitmap.createBitmap(0, 0, Bitmap.Config.ARGB_8888);
    }
    Mat rgb = new Mat();
    int conversion = (input.channels() == 1) ? Imgproc.COLOR_GRAY2RGB : Imgproc.COLOR_BGR2RGB;
    Imgproc.cvtColor(input, rgb, conversion);

    // Rotate 90 degrees clockwise: transpose, then flip around the y-axis.
    Core.transpose(rgb, rgb);
    Core.flip(rgb, rgb, 1);

    Bitmap bitmap = Bitmap.createBitmap(rgb.cols(), rgb.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgb, bitmap);
    return bitmap;
}

From source file:com.github.rosjava_catkin_package_a.ARLocROS.Imshow.java

License:Apache License

/**
 * Displays an OpenCV image in the shared {@code Imshow} window, creating
 * the window lazily on first use.
 *
 * @param opencvImage image to display; 1 channel is rendered as grayscale,
 *                    anything else as 3-byte BGR.
 */
public static void show(Mat opencvImage) {

    // NOTE(review): Dimension(width, height) is passed (rows, cols), i.e.
    // width=rows and height=cols; the swap appears undone below by passing
    // (frameSize.height, frameSize.width) = (cols, rows) to the constructor.
    // Confirm against Imshow(String, int, int)'s parameter order.
    Dimension frameSize = new Dimension(opencvImage.rows(), opencvImage.cols());
    if (frame == null) {
        // Lazily create the singleton window, sized from the first image.
        frame = new Imshow("", frameSize.height, frameSize.width);
        frame.Window.setVisible(true);

        frame.Window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        if (frame.SizeCustom) {
            // Only applied on the very first call; later images are not resized.
            Imgproc.resize(opencvImage, opencvImage, new Size(frame.Height, frame.Width));
        }
    }
    BufferedImage bufImage = null;
    try {

        // Pick the raster layout from the channel count: gray vs. BGR.
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (opencvImage.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        int bufferSize = opencvImage.channels() * opencvImage.cols() * opencvImage.rows();
        byte[] b = new byte[bufferSize];
        opencvImage.get(0, 0, b);
        BufferedImage bufferedImage = new BufferedImage(opencvImage.cols(), opencvImage.rows(), type);
        // Copy the pixel bytes straight into the image's backing buffer.
        final byte[] targetPixels = ((DataBufferByte) bufferedImage.getRaster().getDataBuffer()).getData();
        System.arraycopy(b, 0, targetPixels, 0, b.length);
        bufImage = bufferedImage;
        frame.image.setImage(bufImage);
        frame.Window.pack();
        frame.label.updateUI();
        //frame.Window.setVisible(true);
    } catch (Exception e) {
        // Best-effort display: on failure, print the trace and keep the
        // window's previous image.
        e.printStackTrace();
    }
}

From source file:com.ibm.streamsx.edgevideo.device.edgent.JsonMat.java

License:Open Source License

/**
 * Serializes a matrix's metadata plus its Base64-encoded pixel data into a
 * JSON object.
 *
 * @param mat matrix to serialize.
 * @return JSON object carrying width, height, type, channels, depth and the
 *         encoded pixel payload under "mat".
 */
public static JsonObject toJsonObject(Mat mat) {
    JsonObject result = new JsonObject();
    result.addProperty("width", mat.width());
    result.addProperty("height", mat.height());
    result.addProperty("type", mat.type());
    result.addProperty("channels", mat.channels());
    result.addProperty("depth", mat.depth());
    result.addProperty("mat", base64MimeEncodeMat(mat));
    return result;
}

From source file:com.ibm.streamsx.edgevideo.device.edgent.JsonMat.java

License:Open Source License

/**
 * Encodes a matrix's raw pixel bytes as a MIME Base64 string, first halving
 * the image dimensions when the raw payload would exceed 50 KiB.
 *
 * @param mat matrix whose pixels are encoded.
 * @return MIME Base64 encoding of the (possibly downscaled) pixel bytes.
 */
private static String base64MimeEncodeMat(Mat mat) {
    int width = mat.width();
    int height = mat.height();
    int channels = mat.channels();

    // Downscale by 2 when the raw byte payload would exceed 50 KiB.
    // With initial resize factor of 4 and being within 2' of the MBP camera,
    // a face image seems to be on the order of 15Kb.
    if (width * height * channels > 50 * 1024) {
        int resizeFactor = 2;
        Mat smallerFace = new Mat();
        Imgproc.resize(mat, smallerFace, new Size(mat.width() / resizeFactor, mat.height() / resizeFactor));
        mat = smallerFace;
        width = mat.width();
        height = mat.height();
        channels = mat.channels();
    }

    byte[] sourcePixels = new byte[width * height * channels];
    mat.get(0, 0, sourcePixels);

    // Base64 encode the image to be able to package it in a JsonObject.
    // java.util.Base64 since 1.8, otherwise use Apache Commons.
    Encoder encoder = Base64.getMimeEncoder();
    return encoder.encodeToString(sourcePixels);

    //System.out.println("pub face bytes size: " + sourcePixels.length + " base64 size:" + base64.length());
}