Example usage for org.opencv.core Mat channels

List of usage examples for org.opencv.core Mat channels

Introduction

On this page you can find example usages of org.opencv.core Mat channels.

Prototype

public int channels() 
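
A minimal sketch of what the method reports (the Mat sizes and types below are illustrative and assume org.opencv.core.Mat and org.opencv.core.CvType are imported and the native library is loaded):

Mat gray = new Mat(480, 640, CvType.CV_8UC1);  // single-channel, 8-bit
Mat color = new Mat(480, 640, CvType.CV_8UC3); // three-channel, 8-bit BGR
System.out.println(gray.channels());  // 1
System.out.println(color.channels()); // 3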

Source Link

Usage

From source file:de.hftl_projekt.ict.MainActivity.java

/**
 * Method to reduce (quantize) the colors of the given matrix (image).
 * @param image input matrix
 * @return modified input matrix
 */
public Mat reduceColors(Mat image) {
    // 'channels' is a List<Mat> field of the enclosing activity
    if (channels.size() == 0) {
        // fill the list with one empty matrix per channel of the image
        for (int i = 0; i < image.channels(); i++) {
            Mat channel = new Mat();
            channels.add(channel);
        }
    }
    int i = 0;
    // process each channel individually
    for (Mat c : channels) {
        Core.extractChannel(image, c, i);
        // binary quantization: threshold each channel (R, G, B) so every pixel becomes 0 or 255,
        // using Otsu's algorithm to choose the threshold automatically
        Imgproc.threshold(c, c, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
        i++;
    }
    Core.merge(channels, image); // merge the channels back together
    return image;
}
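
The snippet above reads a channels field that is not shown in the listing; a plausible sketch of that assumed context and of a call site (the field name and image path are hypothetical):

// Assumed field of MainActivity (not shown in the listing above)
private final List<Mat> channels = new ArrayList<>();

// Hypothetical call site: quantize a loaded BGR image in place
Mat frame = Imgcodecs.imread("input.jpg");
Mat quantized = reduceColors(frame);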

From source file:depthDataFromStereoCamsOpenCV.ProcessImages.java

/**
 * Converts a Mat to a BufferedImage.
 * @param m input Mat
 * @return BufferedImage
 */

public static BufferedImage Mat2BufferedImage(Mat m) {
    // source: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
    // Fastest code
    // The output can be assigned either to a BufferedImage or to an Image

    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;

}

From source file:detectiontest.ImageDisplayer.java

public static BufferedImage toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:dfmDrone.examples.fitEllipseExample.java

public static Image toBufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1)
        type = BufferedImage.TYPE_3BYTE_BGR;

    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:digimesh.xbee.gui.MeshGui.java

private BufferedImage Mat2BufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}

From source file:digitalassistant.Panel.java

/**
 * Converts/writes a Mat into a BufferedImage.
 *
 * @param matrix Mat of type CV_8UC3 or CV_8UC1
 * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY
 */
public BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] data = new byte[cols * rows * elemSize];
    int type;
    matrix.get(0, 0, data);
    switch (matrix.channels()) {
    case 1:
        type = BufferedImage.TYPE_BYTE_GRAY;
        break;
    case 3:
        type = BufferedImage.TYPE_3BYTE_BGR;
        // bgr to rgb  
        byte b;
        for (int i = 0; i < data.length; i = i + 3) {
            b = data[i];
            data[i] = data[i + 2];
            data[i + 2] = b;
        }
        break;
    default:
        return null;
    }
    BufferedImage image = new BufferedImage(cols, rows, type);
    image.getRaster().setDataElements(0, 0, cols, rows, data);
    return image;
}
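
A hedged usage sketch for the converter above; the capture object and Swing label are assumptions, not part of the original source:

Mat frame = new Mat();
videoCapture.read(frame);                     // assumed org.opencv.videoio.VideoCapture
BufferedImage img = matToBufferedImage(frame);
if (img != null) {
    previewLabel.setIcon(new ImageIcon(img)); // assumed javax.swing.JLabel
}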

From source file:edu.wpi.cscore.RawCVMatSource.java

License: Open Source License

/**
 * Put an OpenCV image and notify sinks.
 *
 * <p>Only 8-bit single-channel or 3-channel (with BGR channel order) images
 * are supported. If the format, depth or channel order is different, use
 * Mat.convertTo() and/or cvtColor() to convert it first.
 *
 * @param image OpenCV image
 */
public void putFrame(Mat image) {
    int channels = image.channels();
    if (channels != 1 && channels != 3) {
        throw new VideoException("Unsupported Image Type");
    }
    int imgType = channels == 1 ? PixelFormat.kGray.getValue() : PixelFormat.kBGR.getValue();
    CameraServerJNI.putRawSourceFrame(m_handle, image.dataAddr(), image.width(), image.height(), imgType,
            (int) image.total() * channels);
}
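
As the Javadoc notes, only 8-bit 1- or 3-channel BGR images are accepted; a rough sketch of preparing an RGBA frame before calling putFrame (the input Mat and source variable are assumptions):

Mat bgr = new Mat();
Imgproc.cvtColor(rgbaFrame, bgr, Imgproc.COLOR_RGBA2BGR); // reorder to BGR, drop alpha
Mat bgr8 = new Mat();
bgr.convertTo(bgr8, CvType.CV_8U);                        // ensure 8-bit depth
source.putFrame(bgr8);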

From source file:facedetection.FaceDetector.java

public BufferedImage mat2BufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;

}

From source file:faceDetectionV1.FaceDetection.java

private BufferedImage convertMatToImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;

    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }

    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] bytes = new byte[bufferSize];
    matrix.get(0, 0, bytes);
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(bytes, 0, targetPixels, 0, bytes.length);
    return image;
}