Example usage for org.opencv.core Mat channels

List of usage examples for org.opencv.core Mat channels

Introduction

On this page you can find example usage for org.opencv.core Mat channels.

Prototype

public int channels() 

Source Link

Usage

From source file:org.sleuthkit.autopsy.coreutils.VideoUtils.java

License:Open Source License

/**
 * Generates a thumbnail grid (THUMB_COLUMNS x THUMB_ROWS frames) for the given video file.
 * The video content is first extracted to a temporary file, then sampled at evenly spaced
 * timestamps with OpenCV.
 *
 * @param file     the video file to thumbnail
 * @param iconSize target size (in pixels) of the returned thumbnail
 * @return the resized thumbnail image, or null if the video could not be opened/decoded
 */
@NbBundle.Messages({ "# {0} - file name",
        "VideoUtils.genVideoThumb.progress.text=extracting temporary file {0}" })
static BufferedImage generateVideoThumbnail(AbstractFile file, int iconSize) {
    java.io.File tempFile = getTempVideoFile(file);
    // (Re-)extract the content to a temp file if it is missing or incomplete.
    if (tempFile.exists() == false || tempFile.length() < file.getSize()) {
        ProgressHandle progress = ProgressHandle
                .createHandle(Bundle.VideoUtils_genVideoThumb_progress_text(file.getName()));
        progress.start(100);
        try {
            Files.createParentDirs(tempFile);
            ContentUtils.writeToFile(file, tempFile, progress, null, true);
        } catch (IOException ex) {
            // Best effort: log and fall through; opening the (partial) file below may still fail.
            LOGGER.log(Level.WARNING,
                    "Error extracting temporary file for " + ImageUtils.getContentPathSafe(file), ex); //NON-NLS
        } finally {
            progress.finish();
        }
    }

    VideoCapture videoFile = new VideoCapture(); // will contain the video

    if (!videoFile.open(tempFile.toString())) {
        LOGGER.log(Level.WARNING, "Error opening {0} for preview generation.",
                ImageUtils.getContentPathSafe(file)); //NON-NLS
        return null;
    }
    // From here on the capture is open; release it on every exit path (the original
    // code leaked the capture when fps/frame-count validation failed).
    try {
        double fps = videoFile.get(CV_CAP_PROP_FPS); // gets frames per second
        double totalFrames = videoFile.get(CV_CAP_PROP_FRAME_COUNT); // gets total frames
        if (fps <= 0 || totalFrames <= 0) {
            LOGGER.log(Level.WARNING, "Error getting fps or total frames for {0}",
                    ImageUtils.getContentPathSafe(file)); //NON-NLS
            return null;
        }
        double milliseconds = 1000 * (totalFrames / fps); //total milliseconds

        // Default time to start sampling at is 500ms, unless the video is shorter than that.
        double timestamp = Math.min(milliseconds, 500);

        // Spacing (ms) between the sampled frames that fill the thumbnail grid.
        int frameSkip = Double.valueOf(Math.floor((milliseconds - timestamp) / (THUMB_COLUMNS * THUMB_ROWS)))
                .intValue();

        Mat imageMatrix = new Mat();
        BufferedImage bufferedImage = null;

        for (int x = 0; x < THUMB_COLUMNS; x++) {
            for (int y = 0; y < THUMB_ROWS; y++) {
                if (!videoFile.set(CV_CAP_PROP_POS_MSEC,
                        timestamp + x * frameSkip + y * frameSkip * THUMB_COLUMNS)) {
                    LOGGER.log(Level.WARNING, "Error seeking to " + timestamp + "ms in {0}",
                            ImageUtils.getContentPathSafe(file)); //NON-NLS
                    break; // if we can't set the time, return black for that frame
                }
                //read the frame into the image/matrix
                if (!videoFile.read(imageMatrix)) {
                    LOGGER.log(Level.WARNING, "Error reading frames at " + timestamp + "ms from {0}",
                            ImageUtils.getContentPathSafe(file)); //NON-NLS
                    break; //if the image for some reason is bad, return black for that frame
                }

                // Lazily size the composite once the frame dimensions are known.
                if (bufferedImage == null) {
                    bufferedImage = new BufferedImage(imageMatrix.cols() * THUMB_COLUMNS,
                            imageMatrix.rows() * THUMB_ROWS, BufferedImage.TYPE_3BYTE_BGR);
                }

                byte[] data = new byte[imageMatrix.rows() * imageMatrix.cols() * (int) (imageMatrix.elemSize())];
                imageMatrix.get(0, 0, data); //copy the image to data

                // Swap the first and third bytes of each pixel (B <-> R).
                // NOTE(review): the target is TYPE_3BYTE_BGR and OpenCV frames are already BGR,
                // so this swap looks redundant — kept as-is to preserve existing output.
                if (imageMatrix.channels() == 3) {
                    for (int k = 0; k < data.length; k += 3) {
                        byte temp = data[k];
                        data[k] = data[k + 2];
                        data[k + 2] = temp;
                    }
                }

                bufferedImage.getRaster().setDataElements(imageMatrix.cols() * x, imageMatrix.rows() * y,
                        imageMatrix.cols(), imageMatrix.rows(), data);
            }
        }

        return bufferedImage == null ? null : ScalrWrapper.resizeFast(bufferedImage, iconSize);
    } finally {
        videoFile.release(); // close the file on all paths
    }
}

From source file:processdata.ExperimentalDataProcessingUI.java

/**
 * Converts an OpenCV Mat into a BufferedImage by copying its raw pixel bytes
 * directly into the image's backing data buffer.
 * Source idea: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
 *
 * @param m the source matrix (1 channel -> grayscale, otherwise 3-byte BGR)
 * @return a BufferedImage backed by a copy of the Mat's pixels
 */
public static BufferedImage Mat2BufferedImage(Mat m) {
    // Multi-channel mats map to BGR; single-channel mats map to grayscale.
    final int imageType = (m.channels() > 1) ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Pull every pixel of the matrix into a flat byte array.
    final byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels);

    // Copy straight into the raster's underlying byte buffer — fastest approach.
    final BufferedImage result = new BufferedImage(m.cols(), m.rows(), imageType);
    final byte[] rasterData = ((DataBufferByte) result.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, rasterData, 0, pixels.length);
    return result;
}

From source file:samples.FtcTestOpenCv.java

License:Open Source License

/**
 * This method combines an overlay image to the given background image at the specified location.
 * It is expecting both the background and overlay are color images. It also expects the overlay
 * image contains an alpha channel for opacity information.
 *
 * @param background specifies the background image.
 * @param overlay specifies the overlay image.
 * @param locX specifies the X location on the background image where the upper left corner of the overlay
 *        image should be at/*from  w w w . j a  v a2  s . c o  m*/
 * @param locY specifies the Y location on the backgorund image where the upper left corner of the overlay
 *        image should be at.
 */
private void combineImage(Mat background, Mat overlay, int locX, int locY) {
    //
    // Make sure the background image has at least 3 channels and the overlay image has
    // at least 4 channels.
    //
    if (background.channels() >= 3 && overlay.channels() >= 4) {
        //
        // For each row of the overlay image.
        //
        for (int row = 0; row < overlay.rows(); row++) {
            //
            // Calculate the corresponding row number of the background image.
            // Skip the row if it is outside of the background image.
            //
            int destRow = locY + row;
            if (destRow < 0 || destRow >= background.rows())
                continue;
            //
            // For each column of the overlay image.
            //
            for (int col = 0; col < overlay.cols(); col++) {
                //
                // Calculate the corresponding column number of background image.
                // Skip the column if it is outside of the background image.
                //
                int destCol = locX + col;
                if (destCol < 0 || destCol >= background.cols())
                    continue;
                //
                // Get the source pixel from the overlay image and the destination pixel from the
                // background image. Calculate the opacity as a percentage.
                //
                double[] srcPixel = overlay.get(row, col);
                double[] destPixel = background.get(destRow, destCol);
                double opacity = srcPixel[3] / 255.0;
                //
                // Merge the source pixel to the destination pixel with the proper opacity.
                // Each color pixel consists of 3 channels: BGR (Blue, Green, Red).
                // The fourth channel is opacity and is only applicable for the overlay image.
                //
                for (int channel = 0; channel < 3; channel++) {
                    destPixel[channel] = destPixel[channel] * (1.0 - opacity) + srcPixel[channel] * opacity;
                }
                //
                // Put the resulting pixel into the background image.
                //
                background.put(destRow, destCol, destPixel);
            }
        }
    } else {
        throw new RuntimeException(
                "Invalid image format (src=" + overlay.channels() + ",dest=" + background.channels() + ").");
    }
}

From source file:sanntidvideo.Main.java

/**
 * Converts an OpenCV Mat into a BufferedImage.
 *
 * @param m source matrix; single-channel becomes grayscale, multi-channel becomes 3-byte BGR
 * @return the converted image
 */
public BufferedImage toBufferedImage(Mat m) {
    final boolean isColor = m.channels() > 1;
    final int imgType = isColor ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Read the whole matrix into one flat byte buffer.
    final int totalBytes = m.channels() * m.cols() * m.rows();
    final byte[] raw = new byte[totalBytes];
    m.get(0, 0, raw);

    // Blit the bytes straight into the new image's raster storage.
    final BufferedImage out = new BufferedImage(m.cols(), m.rows(), imgType);
    final byte[] dest = ((DataBufferByte) out.getRaster().getDataBuffer()).getData();
    System.arraycopy(raw, 0, dest, 0, raw.length);
    return out;
}

From source file:sanntidvideo.VideoCap.java

/**
 * Converts an OpenCV Mat into an AWT Image.
 *
 * @param m source matrix; one channel maps to grayscale, more map to 3-byte BGR
 * @return the converted image
 */
public Image toBufferedImage(Mat m) {
    // Choose the image type from the channel count.
    int type;
    if (m.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    } else {
        type = BufferedImage.TYPE_BYTE_GRAY;
    }

    // Grab all pixels from the matrix in one call.
    final byte[] matBytes = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, matBytes);

    // Copy into the image's backing buffer for speed.
    final BufferedImage img = new BufferedImage(m.cols(), m.rows(), type);
    final byte[] imgBytes = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    System.arraycopy(matBytes, 0, imgBytes, 0, matBytes.length);
    return img;
}

From source file:servershootingstar.BallDetector.java

/**
 * Converts an OpenCV Mat into a BufferedImage.
 * Source idea: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
 * The output can be assigned either to a BufferedImage or to an Image.
 *
 * @param m source matrix; one channel yields grayscale, otherwise 3-byte BGR
 * @return the converted image
 */
public static BufferedImage Mat2BufferedImage(Mat m) {
    final int cols = m.cols();
    final int rows = m.rows();
    final int channels = m.channels();

    // Grayscale for single-channel mats, BGR for everything else.
    final int imageType = channels > 1 ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;

    // Copy every pixel out of the matrix...
    final byte[] buffer = new byte[channels * cols * rows];
    m.get(0, 0, buffer);

    // ...and into the raster's backing array.
    final BufferedImage image = new BufferedImage(cols, rows, imageType);
    final byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, target, 0, buffer.length);
    return image;
}

From source file:src.main.java.org.roomwatcher.watcher.Watcher.java

/**  
* Converts/writes a Mat into a BufferedImage.  
*   /*from  w  w  w.jav a 2 s.  c  om*/
* @param matrix Mat of type CV_8UC3 or CV_8UC1  
* @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY  
*/
public boolean MatToBufferedImage(Mat matBGR) {
    long startTime = System.nanoTime();
    int width = matBGR.width(), height = matBGR.height(), channels = matBGR.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    matBGR.get(0, 0, sourcePixels);

    // create new image and get reference to backing data  
    image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);
    long endTime = System.nanoTime();

    //System.out.println(String.format("Elapsed time: %.2f ms", (float)(endTime - startTime)/1000000));  
    return true;
}

From source file:syncleus.dann.data.video.TLDUtil.java

License:Apache License

/**
 * Extracts the contents of a CV_8UC1 Mat into a reused byte array.
 *
 * The corresponding Java primitive array type depends on the Mat type:
 * CV_8U and CV_8S -> byte[]
 * CV_16U and CV_16S -> short[]
 * CV_32S -> int[]
 * CV_32F -> float[]
 * CV_64F -> double[]
 *
 * NOTE: the returned array is a shared static buffer, reused across calls.
 */
public static byte[] getByteArray(final Mat mat) {
    // Only single-channel 8-bit unsigned mats are supported here.
    if (CvType.CV_8UC1 != mat.type()) {
        throw new IllegalArgumentException(
                "Expected type is CV_8UC1, we found: " + CvType.typeToString(mat.type()));
    }

    // Grow/shrink the shared buffer only when the element count changes.
    final int elementCount = (int) (mat.total() * mat.channels());
    if (_byteBuff.length != elementCount) {
        _byteBuff = new byte[elementCount];
    }

    mat.get(0, 0, _byteBuff); // 0 for row and col means the WHOLE Matrix
    return _byteBuff;
}

From source file:syncleus.dann.data.video.TLDUtil.java

License:Apache License

/**
 * Extracts the contents of a CV_32SC1 Mat into a reused int array.
 *
 * NOTE: the returned array is a shared static buffer, reused across calls.
 */
public static int[] getIntArray(final Mat mat) {
    // Only single-channel 32-bit signed integer mats are supported here.
    if (CvType.CV_32SC1 != mat.type()) {
        throw new IllegalArgumentException(
                "Expected type is CV_32SC1, we found: " + CvType.typeToString(mat.type()));
    }

    // Reallocate the shared buffer only when the element count changes.
    final int elementCount = (int) (mat.total() * mat.channels());
    if (_intBuff.length != elementCount) {
        _intBuff = new int[elementCount];
    }

    mat.get(0, 0, _intBuff); // 0 for row and col means the WHOLE Matrix
    return _intBuff;
}

From source file:syncleus.dann.data.video.TLDUtil.java

License:Apache License

/**
 * Extracts the contents of a CV_32FC1 Mat into a reused float array.
 *
 * NOTE: the returned array is a shared static buffer, reused across calls.
 */
public static float[] getFloatArray(final Mat mat) {
    // Only single-channel 32-bit float mats are supported here.
    if (CvType.CV_32FC1 != mat.type()) {
        throw new IllegalArgumentException(
                "Expected type is CV_32FC1, we found: " + CvType.typeToString(mat.type()));
    }

    // Reallocate the shared buffer only when the element count changes.
    final int elementCount = (int) (mat.total() * mat.channels());
    if (_floatBuff.length != elementCount) {
        _floatBuff = new float[elementCount];
    }

    mat.get(0, 0, _floatBuff); // 0 for row and col means the WHOLE Matrix
    return _floatBuff;
}