Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find example usages of org.opencv.core Mat.get.

Prototype

public int get(int row, int col, double[] data) 

Source Link

Usage

From source file:org.ar.rubik.MonoChromatic.java

License:Open Source License

/**
 * Create a submatrix from a byte array, then use Core.minMaxLoc() to
 * measure the hue spread of each span-by-span window.
 * This solution consumes about 10 seconds per frame.
 *
 * @param original_image RGB input image
 * @return single-channel (CV_8UC1) image: 128 where the local hue spread
 *         is below the accuracy threshold (region is monochromatic),
 *         0 elsewhere and on the border
 */
private static Mat monochromaticMedianImageFilterUtilizingOpenCv3(Mat original_image) {
    final Size imageSize = original_image.size();

    Mat monochromatic_image = new Mat(imageSize, CvType.CV_8UC1);
    Mat hsv_image = new Mat(imageSize, CvType.CV_8UC3);

    // NOTE: despite the "hsv" names, the conversion target is HLS.
    Imgproc.cvtColor(original_image, hsv_image, Imgproc.COLOR_RGB2HLS);

    // Copy the hue channel into a flat byte array for speed efficiency.
    final int numColumns = (int) original_image.size().width;
    final int numRows = (int) original_image.size().height;
    final int span = 7;      // side length of the sliding sample window
    final int accuracy = 5;  // max hue spread still considered monochromatic
    List<Mat> channels = new LinkedList<Mat>();
    Core.split(hsv_image, channels);
    Mat hueMat = channels.get(0);
    final int bufferSize = numColumns * numRows;
    byte[] hueByteArray = new byte[bufferSize];
    hueMat.get(0, 0, hueByteArray); // get all the pixels

    // Output byte array for speed efficiency
    byte[] monochromaticByteArray = new byte[bufferSize];

    Mat subimageMat = new Mat(span, span, CvType.CV_8UC1);
    byte[] subimageByteArray = new byte[span * span];

    for (int row = 0; row < numRows; row++) {

        byte result_pixel = 0;

        for (int col = 0; col < numColumns; col++) {

            if (col < span || (col >= numColumns - span))
                result_pixel = 0; // Too close to the left/right edge: black.

            else if (row < span || (row >= numRows - span))
                result_pixel = 0; // Too close to the top/bottom edge: black.

            else {

                // Copy the span x span window, one row at a time.
                for (int i = 0; i < span; i++) {

                    // copy span bytes from (row + i) * numCol + col
                    int srcPos = (row + i) * numColumns + col;
                    int dstPos = i * span;
                    System.arraycopy(hueByteArray, srcPos, subimageByteArray, dstPos, span);
                }

                subimageMat.put(0, 0, subimageByteArray);
                Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(subimageMat);

                // BUG FIX: the original compared maxVal against itself
                // (always 0), so every interior pixel was classified as
                // monochromatic. Compare max against min instead.
                if ((minMaxResult.maxVal - minMaxResult.minVal) < accuracy)
                    result_pixel = (byte) 128;
                else
                    result_pixel = (byte) 0;

            } // End of else

            // Center the result on the middle of the sampling window.
            if ((col >= span / 2) && (row >= span / 2))
                monochromaticByteArray[(row - span / 2) * numColumns + (col - span / 2)] = result_pixel;

        } // End of column sweep

    } // End of row sweep
    Log.i(Constants.TAG, "Completed MonoChromatic CV");
    monochromatic_image.put(0, 0, monochromaticByteArray);
    return monochromatic_image;
}

From source file:org.ar.rubik.MonoChromatic.java

License:Open Source License

/**
 * Simple algorithm in Java.  Java byte arrays of the original image
 * are obtain and operated on to then produce a resulting Java byte
 * array./*from  ww  w. j a  va  2 s. co  m*/
 * 
 * 
 * @param original_image
 * @return
 */
private static Mat monochromaticMedianImageFilterBruteForceInJava(Mat original_image) {

    final Size imageSize = original_image.size();

    Mat monochromatic_image = new Mat(imageSize, CvType.CV_8UC1);
    Mat hsv_image = new Mat(imageSize, CvType.CV_8UC3);

    Imgproc.cvtColor(original_image, hsv_image, Imgproc.COLOR_RGB2HLS);
    //      Log.i(Constants.TAG, "HSV Image: " + hsv_image); // CV_8UC3

    // Try RGB below
    //      hsv_image = result;

    // Get hue channel into simple byte array for speed efficiency.
    final int numColumns = (int) original_image.size().width;
    final int numRows = (int) original_image.size().height;
    List<Mat> channels = new LinkedList<Mat>();
    Core.split(hsv_image, channels);
    Mat hueMat = channels.get(0);
    Mat lumMat = channels.get(1);
    Mat satMat = channels.get(2);
    final int bufferSize = numColumns * numRows;
    byte[] hueByteArray = new byte[bufferSize];
    byte[] lumByteArray = new byte[bufferSize];
    byte[] satByteArray = new byte[bufferSize];
    hueMat.get(0, 0, hueByteArray); // get all the pixels
    lumMat.get(0, 0, lumByteArray); // get all the pixels
    satMat.get(0, 0, satByteArray); // get all the pixels

    // Output byte array for speed efficiency
    byte[] monochromaticByteArray = new byte[bufferSize];

    for (int row = 0; row < numRows; row++) {

        final int span = (int) 7;
        final int accuracy = (int) 5;

        byte result_pixel = 0;

        for (int col = 0; col < numColumns; col++) {

            if (col < span)
                result_pixel = 0; // Just put in black

            else if (row < span)
                result_pixel = 0; // Just put in black

            else {

                int hue_min = 255;
                int hue_max = 0;
                int lum_min = 255;
                int lum_max = 0;
                //               int sat_min = 255;
                //               int sat_max = 0;

                for (int i = 0; i < span; i++) {

                    for (int j = 0; j < span; j++) {

                        int hue = (int) hueByteArray[(row - j) * numColumns + (col - i)] & 0xFF;
                        if (hue > hue_max)
                            hue_max = hue;
                        if (hue < hue_min)
                            hue_min = hue;

                        int lum = (int) lumByteArray[(row - j) * numColumns + (col - i)] & 0xFF;
                        if (lum > lum_max)
                            lum_max = lum;
                        if (lum < lum_min)
                            lum_min = lum;

                        // =+= Saturation does not look correct when veiw as gray scale image.  Not sure what is going on.
                        //                  int sat = (int)satByteArray[row * numColumns + (col - i) ] & 0xFF;
                        //                  if(sat > sat_max)
                        //                     sat_max = sat;
                        //                  if(sat < sat_min)
                        //                     sat_min = sat;

                    } // End of row min/max sweep
                } // End of column min/max sweep

                if ((hue_max - hue_min < accuracy)) //&& (lum_max - lum_min < accuracy) && (sat_max - sat_min < accuracy) )
                    result_pixel = (byte) 128;
                else
                    result_pixel = (byte) 0;

                // Eliminate all black areas from consideration even if they are very flat.
                // For some reason, keying off minimum lumosity works best.   
                if (lum_min < 30)
                    result_pixel = 0;

                //               Log.i(Constants.TAG, String.format("Lum %d %d", lum_min, lum_max));

            } // End of else

            if ((col >= span / 2) && (row >= span / 2))
                monochromaticByteArray[(row - span / 2) * numColumns + (col - span / 2)] = result_pixel;

            //            int test = (int)(satByteArray[row * numColumns + col]) & 0xFF;
            //            monochromaticByteArray[row * numColumns + (col - span/2)] = (byte) test;

        } // End of column sweep

    } // End of row sweep

    monochromatic_image.put(0, 0, monochromaticByteArray);
    return monochromatic_image;
}

From source file:org.openpnp.vision.FluentCv.java

License:Open Source License

/**
 * From FireSight: https://github.com/firepick1/FireSight/wiki/op-Sharpness
 *
 * Computes the GRAS sharpness measure: the mean squared horizontal
 * difference of adjacent gray pixels.
 *
 * @param image input image; converted to gray if it has multiple channels
 * @return mean squared horizontal gradient, or 0 for empty or
 *         single-column images
 */
public static double calculateSharpnessGRAS(Mat image) {
    Mat matGray;
    if (image.channels() == 1) {
        matGray = image;
    } else {
        matGray = new Mat();
        Imgproc.cvtColor(image, matGray, Imgproc.COLOR_BGR2GRAY);
    }

    final int rows = matGray.rows();
    final int cols = matGray.cols();
    // Guard against division by zero for degenerate images.
    if (rows == 0 || cols < 2) {
        return 0.0;
    }

    // BUG FIX: Java bytes are signed; mask with 0xFF so pixel values
    // above 127 are not treated as negative, which corrupted the
    // gradient. Use a long accumulator to avoid int overflow on large
    // images (each term can be up to 255 * 255).
    long sum = 0;
    // Fetch one full row per call instead of one pixel per call.
    byte[] rowBuf = new byte[cols];
    for (int r = 0; r < rows; r++) {
        matGray.get(r, 0, rowBuf);
        for (int c = 0; c < cols - 1; c++) {
            int df = (rowBuf[c] & 0xFF) - (rowBuf[c + 1] & 0xFF);
            sum += (long) df * df;
        }
    }

    return ((double) sum / rows / (cols - 1));
}

From source file:org.sikuli.android.ADBDevice.java

License:MIT License

/**
 * Captures the given region of the device screen as a BufferedImage,
 * or returns null if no screen Mat could be captured.
 */
public BufferedImage captureDeviceScreen(int x, int y, int w, int h) {
    final Mat screenMat = captureDeviceScreenMat(x, y, w, h);
    if (screenMat == null) {
        return null;
    }
    // Copy the Mat's pixels straight into the raster's backing byte array.
    final BufferedImage image = new BufferedImage(screenMat.width(), screenMat.height(),
            BufferedImage.TYPE_3BYTE_BGR);
    final byte[] raster = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    screenMat.get(0, 0, raster);
    return image;
}

From source file:org.sikuli.script.Finder.java

License:MIT License

/** Logs every pixel of an integer-valued Mat, one log line per pixel. */
private static void printMatI(Mat mat) {
    final int[] pixel = new int[mat.channels()];
    final int rows = mat.rows();
    final int cols = mat.cols();
    for (int row = 0; row < rows; row++) {
        for (int col = 0; col < cols; col++) {
            mat.get(row, col, pixel);
            log(lvl, "(%d, %d) %s", row, col, Arrays.toString(pixel));
        }
    }
}

From source file:org.sleuthkit.autopsy.coreutils.VideoUtils.java

License:Open Source License

@NbBundle.Messages({ "# {0} - file name",
        "VideoUtils.genVideoThumb.progress.text=extracting temporary file {0}" })
/**
 * Generates a grid-of-frames thumbnail for the given video file.
 * The video is first extracted to a temp file, then THUMB_COLUMNS x
 * THUMB_ROWS frames are sampled and composited into one image.
 *
 * @param file     the video file to thumbnail
 * @param iconSize target size of the returned (resized) thumbnail
 * @return the thumbnail, or null if the video could not be opened/read
 */
static BufferedImage generateVideoThumbnail(AbstractFile file, int iconSize) {
    java.io.File tempFile = getTempVideoFile(file);
    // Extract the video to a temp file if it is missing or incomplete.
    if (tempFile.exists() == false || tempFile.length() < file.getSize()) {
        ProgressHandle progress = ProgressHandle
                .createHandle(Bundle.VideoUtils_genVideoThumb_progress_text(file.getName()));
        progress.start(100);
        try {
            Files.createParentDirs(tempFile);
            ContentUtils.writeToFile(file, tempFile, progress, null, true);
        } catch (IOException ex) {
            LOGGER.log(Level.WARNING,
                    "Error extracting temporary file for " + ImageUtils.getContentPathSafe(file), ex); //NON-NLS
        } finally {
            progress.finish();
        }
    }

    VideoCapture videoFile = new VideoCapture(); // will contain the video

    if (!videoFile.open(tempFile.toString())) {
        LOGGER.log(Level.WARNING, "Error opening {0} for preview generation.",
                ImageUtils.getContentPathSafe(file)); //NON-NLS
        return null;
    }
    try {
        double fps = videoFile.get(CV_CAP_PROP_FPS); // gets frame per second
        double totalFrames = videoFile.get(CV_CAP_PROP_FRAME_COUNT); // gets total frames
        if (fps <= 0 || totalFrames <= 0) {
            LOGGER.log(Level.WARNING, "Error getting fps or total frames for {0}",
                    ImageUtils.getContentPathSafe(file)); //NON-NLS
            // BUG FIX: the original leaked the opened VideoCapture here;
            // it is now released in the finally block below.
            return null;
        }
        double milliseconds = 1000 * (totalFrames / fps); //total milliseconds

        double timestamp = Math.min(milliseconds, 500); //default time to check for is 500ms, unless the files is extremely small

        // Milliseconds between sampled frames (original typo: "framkeskip").
        int frameSkip = Double.valueOf(Math.floor((milliseconds - timestamp) / (THUMB_COLUMNS * THUMB_ROWS)))
                .intValue();

        Mat imageMatrix = new Mat();
        BufferedImage bufferedImage = null;

        for (int x = 0; x < THUMB_COLUMNS; x++) {
            for (int y = 0; y < THUMB_ROWS; y++) {
                if (!videoFile.set(CV_CAP_PROP_POS_MSEC,
                        timestamp + x * frameSkip + y * frameSkip * THUMB_COLUMNS)) {
                    LOGGER.log(Level.WARNING, "Error seeking to " + timestamp + "ms in {0}",
                            ImageUtils.getContentPathSafe(file)); //NON-NLS
                    break; // if we can't set the time, return black for that frame
                }
                //read the frame into the image/matrix
                if (!videoFile.read(imageMatrix)) {
                    LOGGER.log(Level.WARNING, "Error reading frames at " + timestamp + "ms from {0}",
                            ImageUtils.getContentPathSafe(file)); //NON-NLS
                    break; //if the image for some reason is bad, return black for that frame
                }

                // Lazily size the composite once the frame size is known.
                if (bufferedImage == null) {
                    bufferedImage = new BufferedImage(imageMatrix.cols() * THUMB_COLUMNS,
                            imageMatrix.rows() * THUMB_ROWS, BufferedImage.TYPE_3BYTE_BGR);
                }

                byte[] data = new byte[imageMatrix.rows() * imageMatrix.cols() * (int) (imageMatrix.elemSize())];
                imageMatrix.get(0, 0, data); //copy the image to data

                // Swap the first and third channels of each pixel so the
                // bytes match BufferedImage.TYPE_3BYTE_BGR ordering.
                if (imageMatrix.channels() == 3) {
                    for (int k = 0; k < data.length; k += 3) {
                        byte temp = data[k];
                        data[k] = data[k + 2];
                        data[k + 2] = temp;
                    }
                }

                bufferedImage.getRaster().setDataElements(imageMatrix.cols() * x, imageMatrix.rows() * y,
                        imageMatrix.cols(), imageMatrix.rows(), data);
            }
        }

        return bufferedImage == null ? null : ScalrWrapper.resizeFast(bufferedImage, iconSize);
    } finally {
        videoFile.release(); // always close the file, even on early return
    }
}

From source file:org.usfirst.frc.team2084.CMonster2016.vision.HighGoalProcessor.java

License:Open Source License

/**
 * Gets the heading of the robot at the time the given image was taken.
 * It does this by reading the timestamp embedded in the top-left corner
 * of the image and looking up the heading in the history buffer.
 *
 * @param image the image to get the timestamp from
 * @return the heading of the robot when the image was taken
 */
private double getHeading(Mat image) {
    // The first 9 bytes of the frame encode the capture timestamp.
    final byte[] timestampBytes = new byte[9];
    image.get(0, 0, timestampBytes);
    final long timestamp = Utils.bytesToLong(timestampBytes);

    // The buffer is keyed by seconds; the timestamp is in milliseconds.
    synchronized (headingBuffer) {
        return headingBuffer.getValue(timestamp / 1000.0);
    }
}

From source file:org.usfirst.frc.team2084.CMonster2016.vision.ImageConvertor.java

License:Open Source License

/**
 * Converts the given Mat into a BufferedImage, reusing the cached
 * javaImage instance whenever its dimensions and type still match.
 */
public BufferedImage toBufferedImage(Mat image) {
    final int width = image.width();
    final int height = image.height();
    // Map the OpenCV Mat type onto the corresponding BufferedImage type.
    final int javaType = toJavaImageType(image.type());

    // Reallocate the cached image only when the Mat no longer matches it.
    final boolean mismatch = javaImage == null
            || javaImage.getWidth() != width
            || javaImage.getHeight() != height
            || javaImage.getType() != javaType;
    if (mismatch) {
        javaImage = new BufferedImage(width, height, javaType);
    }

    // Copy the Mat's pixels directly into the image's backing byte array.
    final byte[] pixels = ((DataBufferByte) javaImage.getRaster().getDataBuffer()).getData();
    image.get(0, 0, pixels);
    return javaImage;
}

From source file:pipeline.TextRegion.java

/**
 * Splits the text regions detected in the received JPEG file into
 * individual cropped images, runs OCR on each region, and returns the
 * concatenated, numbered OCR results.
 *
 * @param fileIn the received JPEG file to process
 * @return one numbered OCR result per detected text region, or "" on error
 */
public static String SplitFiles(File fileIn) {
    String result = "";
    try {
        String nomeFile = fileIn.getName();
        BufferedImage image;
        // BUG FIX: streams are now closed via try-with-resources even
        // when decoding/encoding throws (the original leaked them).
        try (FileInputStream in = new FileInputStream("src/pipeline/receivedImg/" + nomeFile)) {
            JPEGImageDecoder decoder = JPEGCodec.createJPEGDecoder(in);
            image = decoder.decodeAsBufferedImage();
        }

        TextRecognition myget = new TextRecognition(image);
        LinkedList boxes = myget.getTextBoxes();

        String nomeFileOut = "src/pipeline/outputImg/" + Global.getJPGNameFile() + " out.jpg";
        try (FileOutputStream out = new FileOutputStream(nomeFileOut)) {
            JPEGImageEncoder encoder = JPEGCodec.createJPEGEncoder(out);
            encoder.encode(myget.isolateText(boxes));
        }

        // OpenCV part: crop each detected region (with tolerance) and
        // OCR it individually.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        File f = new File("src/pipeline/receivedImg/" + nomeFile);
        BufferedImage imageFile = ImageIO.read(f);

        byte[] data = ((DataBufferByte) imageFile.getRaster().getDataBuffer()).getData();
        Mat mat = new Mat(imageFile.getHeight(), imageFile.getWidth(), CvType.CV_8UC3);
        mat.put(0, 0, data);
        int tolleranza = 15;

        for (int i = 0; i < boxes.size(); i++) {
            TextRegion app = (TextRegion) boxes.get(i);
            Rect roi1 = new Rect(app.x1 - tolleranza, app.y1 - tolleranza, app.x2 - app.x1 + tolleranza,
                    app.y2 - app.y1 + 2 * tolleranza);
            Mat mat1 = new Mat(mat, roi1);

            byte[] data1 = new byte[mat1.rows() * mat1.cols() * (int) (mat1.elemSize())];
            mat1.get(0, 0, data1);
            BufferedImage image1 = new BufferedImage(mat1.cols(), mat1.rows(), BufferedImage.TYPE_3BYTE_BGR);
            image1.getRaster().setDataElements(0, 0, mat1.cols(), mat1.rows(), data1);

            String nomeFileUscrita = "src/pipeline/outputImg/" + i + Global.getJPGNameFile() + " uscita.jpg";
            // BUG FIX: the original created two File objects ("tmp" and
            // "output") for the same path; one suffices for write, OCR,
            // and deletion.
            File output = new File(nomeFileUscrita);
            ImageIO.write(image1, "jpg", output);
            result += (i + 1) + ")" + OCR_Processing.performOCR_String2Text(output);
            output.delete();

        }
        f.delete();
        File foo = new File(nomeFileOut);
        foo.delete();

    } catch (Exception e) {
        System.out.println("Exception: " + e);
    }

    return result;

}

From source file:processdata.ExperimentalDataProcessingUI.java

/**
 * Converts an OpenCV Mat into a BufferedImage.
 * source: http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui/
 * Single-channel Mats become grayscale images; everything else becomes
 * 3-byte BGR. All pixels are copied in one bulk operation for speed.
 */
public static BufferedImage Mat2BufferedImage(Mat m) {
    final int imageType = (m.channels() > 1)
            ? BufferedImage.TYPE_3BYTE_BGR
            : BufferedImage.TYPE_BYTE_GRAY;

    // Pull every pixel out of the Mat in one call.
    final byte[] pixels = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, pixels);

    // Blit the pixel bytes straight into the image's backing buffer.
    final BufferedImage image = new BufferedImage(m.cols(), m.rows(), imageType);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(pixels, 0, targetPixels, 0, pixels.length);
    return image;

}