Example usage for org.opencv.core Mat channels

List of usage examples for org.opencv.core Mat channels

Introduction

On this page you can find example usages of org.opencv.core.Mat.channels().

Prototype

public int channels() 
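
A minimal sketch of the call itself (the file name and variable names are placeholders, not taken from the examples below):

    // channels() reports how many interleaved values make up one pixel
    Mat mat = Imgcodecs.imread("input.png", Imgcodecs.IMREAD_UNCHANGED); // placeholder path
    int channels = mat.channels(); // typically 3 for BGR, 4 with alpha, 1 for grayscale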

Source Link

Usage

From source file:com.ibm.streamsx.edgevideo.device.MyPanel.java

License:Open Source License

public boolean matToBufferedImage(Mat mat, int bufferedImageType) {
    int width = mat.width(), height = mat.height(), channels = mat.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    mat.get(0, 0, sourcePixels);
    // create new image and get reference to backing data  
    image = new BufferedImage(width, height, bufferedImageType);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);
    return true;
}
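
A hedged usage sketch for the method above (the MyPanel instance, the camera index, and the type selection are assumptions for illustration, not part of the original class):

    // Grab one frame from a camera and convert it, picking the BufferedImage type
    // from the frame's channel count
    VideoCapture capture = new VideoCapture(0);      // org.opencv.videoio
    Mat frame = new Mat();
    if (capture.read(frame)) {
        int type = frame.channels() == 1 ? BufferedImage.TYPE_BYTE_GRAY
                : BufferedImage.TYPE_3BYTE_BGR;
        myPanel.matToBufferedImage(frame, type);     // hypothetical MyPanel instance
    }
    capture.release();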

From source file:com.jiminger.image.ImageFile.java

License:Open Source License

public static BufferedImage readBufferedImageFromFile(final String filename) throws IOException {
    LOGGER.trace("Reading image from {}", filename);
    final File f = new File(filename);
    if (!f.exists())
        throw new FileNotFoundException(filename);
    BufferedImage ret = ImageIO.read(f);
    if (ret == null) {
        LOGGER.info("Failed to read '{}' using ImageIO", filename);
        try (Closer closer = new Closer()) {
            final Mat mat = Imgcodecs.imread(filename, IMREAD_UNCHANGED);
            // imread returns an empty Mat (not null) when decoding fails
            if (mat == null || mat.empty())
                throw new IllegalArgumentException("Can't read '" + filename
                        + "' as an image. No codec available in either ImageIO or OpenCv");
            if (filename.endsWith(".jp2") && mat.channels() > 1)
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
            ret = Utils.mat2Img(mat);
        }
    }
    LOGGER.trace("Read {} from {}", ret, filename);
    return ret;
}

From source file:com.jiminger.image.ImageFile.java

License:Open Source License

public static CvRaster readMatFromFile(final String filename, final Closer closer) throws IOException {
    LOGGER.trace("Reading image from {}", filename);
    final File f = new File(filename);
    if (!f.exists())
        throw new FileNotFoundException(filename);

    final CvRaster ret;

    try (Closer cx = new Closer()) {
        final Mat mat = Imgcodecs.imread(filename, IMREAD_UNCHANGED);
        // imread returns an empty Mat (not null) when decoding fails
        if (mat == null || mat.empty()) {
            LOGGER.debug("Failed to read '" + filename + "' using OpenCV");
            ret = Utils.img2CvRaster(ImageIO.read(f));
        } else {
            if (filename.endsWith(".jp2") && mat.channels() > 1)
                Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGB2BGR);
            ret = CvRaster.move(mat, closer);
        }
    }
    LOGGER.trace("Read {} from {}", ret, filename);
    return ret;
}

From source file:com.lauszus.facerecognitionapp.TinyDB.java

License:Apache License

public void putListMat(String key, ArrayList<Mat> objArray) {
    checkForNullKey(key);
    ArrayList<String> objStrings = new ArrayList<String>();

    for (Mat mat : objArray) {
        int size = (int) (mat.total() * mat.channels());
        byte[] data = new byte[size];
        mat.get(0, 0, data);
        String dataString = new String(Base64.encode(data, Base64.DEFAULT));
        objStrings.add(dataString);
    }
    putListString(key, objStrings);
}
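
A hedged counterpart sketch for reading an entry back. putListMat stores only the raw pixel bytes, so the rows, columns, and type must be known from elsewhere; the values below are placeholders:

    // Rebuild one Mat from its Base64-encoded pixel bytes
    byte[] data = Base64.decode(dataString, Base64.DEFAULT);
    Mat mat = new Mat(rows, cols, CvType.CV_8UC3);   // placeholder dimensions and type
    mat.put(0, 0, data);                             // for an 8-bit Mat, data.length equals total() * channels()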

From source file:com.mycompany.analyzer.Analyzer.java

public BufferedImage mat2BufferedImage(Mat m) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = m.channels() * m.cols() * m.rows();
    byte[] b = new byte[bufferSize];
    m.get(0, 0, b); // get all the pixels
    BufferedImage image = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(b, 0, targetPixels, 0, b.length);
    return image;
}
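
A hedged usage sketch showing the converted image displayed with Swing (the Analyzer instance and the input Mat are placeholders):

    // Show the converted frame in a simple Swing window
    BufferedImage image = analyzer.mat2BufferedImage(mat);  // hypothetical instance and Mat
    JFrame window = new JFrame("Preview");
    window.getContentPane().add(new JLabel(new ImageIcon(image)));
    window.pack();
    window.setVisible(true);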

From source file:com.oetermann.imageclassifier.Util.java

License:Open Source License

public static void saveMat(String path, Mat mat) {
    File file = new File(path).getAbsoluteFile();
    file.getParentFile().mkdirs();
    try {
        int rows = mat.rows();
        int cols = mat.cols();
        int type = mat.type();
        Object data;
        switch (mat.type()) {
        case CvType.CV_8S:
        case CvType.CV_8U:
            data = new byte[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (byte[]) data);
            break;
        case CvType.CV_16S:
        case CvType.CV_16U:
            data = new short[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (short[]) data);
            break;
        case CvType.CV_32S:
            data = new int[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (int[]) data);
            break;
        case CvType.CV_32F:
            data = new float[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (float[]) data);
            break;
        case CvType.CV_64F:
            data = new double[(int) mat.total() * mat.channels()];
            mat.get(0, 0, (double[]) data);
            break;
        default:
            data = null;
        }
        try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(path))) {
            oos.writeObject(rows);
            oos.writeObject(cols);
            oos.writeObject(type);
            oos.writeObject(data);
        }
    } catch (IOException | ClassCastException ex) {
        System.err.println("ERROR: Could not save mat to file: " + path);
        //            Logger.getLogger(ImageClassifier.class.getName()).log(Level.SEVERE, null, ex);
    }
}
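
A hedged counterpart sketch that reads the file written above back into a Mat. A loadMat of this shape is an assumption rather than part of the original class, and only the 8-bit case is spelled out; the other depths follow the same pattern:

public static Mat loadMat(String path) {
    try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path))) {
        int rows = (int) ois.readObject();
        int cols = (int) ois.readObject();
        int type = (int) ois.readObject();
        Object data = ois.readObject();
        Mat mat = new Mat(rows, cols, type);
        if (data instanceof byte[])
            mat.put(0, 0, (byte[]) data);    // CV_8U / CV_8S; other depths handled analogously
        return mat;
    } catch (IOException | ClassNotFoundException ex) {
        System.err.println("ERROR: Could not load mat from file: " + path);
        return null;
    }
}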

From source file:com.orange.documentare.core.image.Binarization.java

License:Open Source License

private static boolean isGreyscale(Mat mat) {
    return mat.channels() == 1;
}
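
A hedged sketch of how the channel count distinguishes colour from greyscale data (the input Mat is a placeholder):

    // A BGR image reports 3 channels; converting it to greyscale drops that to 1
    Mat grey = new Mat();
    Imgproc.cvtColor(colorMat, grey, Imgproc.COLOR_BGR2GRAY);  // colorMat is a placeholder
    boolean singleChannel = grey.channels() == 1;              // true after the conversion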

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

private static byte[] computeImageBytesCount(Mat image, boolean raw) {
    int rawBytesCount = image.channels() * image.rows() * image.cols();
    int simDocExtra = raw ? image.rows() : 0;
    return new byte[rawBytesCount + simDocExtra];
}

From source file:com.orange.documentare.core.image.opencv.OpenCvImage.java

License:Open Source License

private static void fillByteArray(byte[] byteArray, Mat image, boolean raw) {
    int colsNb = image.cols();
    int bytesPerPixel = image.channels();
    int bytesPerRow = colsNb * bytesPerPixel + (raw ? 1 : 0);
    byte[] pixel = new byte[bytesPerPixel];
    int magicNumberOffset = 0;
    for (int y = 0; y < image.rows(); y++) {
        for (int x = 0; x < colsNb; x++) {
            image.get(y, x, pixel);
            for (int z = 0; z < bytesPerPixel; z++) {
                byteArray[magicNumberOffset + y * bytesPerRow + x * bytesPerPixel + z] = pixel[z];
            }
        }
        if (raw) {
            byteArray[magicNumberOffset + y * bytesPerRow + colsNb * bytesPerPixel] = SIMDOC_LINE_TERMINATION;
        }
    }
}
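
The pixel-by-pixel loop above leaves room for the per-row terminator byte. When no terminator is needed, a hedged alternative (assuming a continuous Mat) is to copy the whole matrix in one call:

    // Copy rows() * cols() * channels() bytes in a single bulk call
    byte[] buffer = new byte[image.channels() * image.rows() * image.cols()];
    image.get(0, 0, buffer);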

From source file:com.shootoff.camera.shotdetection.JavaShotDetector.java

License:Open Source License

private Set<Pixel> findThresholdPixelsAndUpdateFilter(final Mat workingFrame, final boolean detectShots) {
    dynamicallyThresholded = 0;

    final Set<Pixel> thresholdPixels = Collections.synchronizedSet(new HashSet<Pixel>());

    if (!cameraManager.isDetecting())
        return thresholdPixels;

    final int subWidth = workingFrame.cols() / SECTOR_COLUMNS;
    final int subHeight = workingFrame.rows() / SECTOR_ROWS;

    final int cols = workingFrame.cols();
    final int channels = workingFrame.channels();

    final int size = (int) (workingFrame.total() * channels);
    final byte[] workingFramePrimitive = new byte[size];
    workingFrame.get(0, 0, workingFramePrimitive);

    // In this loop we accomplish both MovingAverage updates AND threshold
    // pixel detection
    Parallel.forIndex(0, (SECTOR_ROWS * SECTOR_COLUMNS), 1, new Operation<Integer>() {
        public void perform(Integer sector) {
            final int sectorX = sector.intValue() % SECTOR_COLUMNS;
            final int sectorY = sector.intValue() / SECTOR_ROWS;

            if (!cameraManager.isSectorOn(sectorX, sectorY))
                return;

            final int startX = subWidth * sectorX;
            final int startY = subHeight * sectorY;

            for (int y = startY; y < startY + subHeight; y++) {
                final int yOffset = y * cols;
                for (int x = startX; x < startX + subWidth; x++) {
                    final int currentH = workingFramePrimitive[(yOffset + x) * channels] & 0xFF;
                    final int currentS = workingFramePrimitive[(yOffset + x) * channels + 1] & 0xFF;
                    final int currentV = workingFramePrimitive[(yOffset + x) * channels + 2] & 0xFF;

                    final Pixel pixel = updateFilter(currentH, currentS, currentV, x, y, detectShots);

                    if (pixel != null)
                        thresholdPixels.add(pixel);
                }
            }
        }
    });

    return thresholdPixels;
}
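
The per-pixel H/S/V indexing above assumes the frame was already converted to HSV, so channels 0, 1 and 2 hold hue, saturation and value; a hedged sketch of that conversion (variable names are placeholders):

    // Convert the camera's BGR frame to HSV before running shot detection on it
    Mat hsvFrame = new Mat();
    Imgproc.cvtColor(bgrFrame, hsvFrame, Imgproc.COLOR_BGR2HSV);  // bgrFrame is a placeholder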