Example usage for org.opencv.core Mat get

List of usage examples for org.opencv.core Mat get

Introduction

On this page you can find example usages of org.opencv.core Mat.get, drawn from the source files listed below.

Prototype

public int get(int row, int col, double[] data) 
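
Besides the double[] overload shown above, Mat.get also accepts byte[], short[], int[] and float[] buffers with the same (row, col, data) shape, and returns the number of bytes copied. The snippet below is a minimal, self-contained sketch (the class name MatGetDemo is ours, not from any project below) showing a single-element read and a whole-matrix read:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class MatGetDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 3x3 double-precision matrix filled with 7.0
        Mat m = new Mat(3, 3, CvType.CV_64FC1, new Scalar(7.0));

        // Read a single element; the return value is the number of bytes copied.
        double[] one = new double[1];
        int copied = m.get(1, 1, one);
        System.out.println("copied " + copied + " bytes, value = " + one[0]);

        // Read the whole matrix in one call, starting at (0, 0); the examples
        // below use the same pattern with byte[] buffers for 8-bit images.
        double[] all = new double[(int) m.total()];
        m.get(0, 0, all);
        System.out.println("elements read: " + all.length);
    }
}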

Usage

From source file:javafx1.JavaFX1.java

public Image bildLaden() {
        Image zwischenBild = null;

        try {
            File input = new File("D:/_piCam/bild.jpg");
            //FileInputStream bi = ImageIO.read(input);
            BufferedImage bi = ImageIO.read(input);

            byte[] data = ((DataBufferByte) bi.getRaster().getDataBuffer()).getData();
            Mat mat = new Mat(bi.getHeight(), bi.getWidth(), CvType.CV_8UC3);
            mat.put(0, 0, data);

            Mat bild = new Mat(bi.getHeight(), bi.getWidth(), CvType.CV_8UC1);
            Imgproc.cvtColor(mat, bild, Imgproc.COLOR_BGR2GRAY);

            byte[] data1 = new byte[bild.rows() * bild.cols() * (int) (bild.elemSize())];
            bild.get(0, 0, data1);
            BufferedImage image1 = new BufferedImage(bild.cols(), bild.rows(), BufferedImage.TYPE_BYTE_GRAY);
            image1.getRaster().setDataElements(0, 0, bild.cols(), bild.rows(), data1);

            File output = new File("D:/xml/grayscale2.jpg");
            //ImageIO.write(image1, "jpg", output);
            BufferedImage gray = image1.getSubimage(0, 0, image1.getTileWidth(), image1.getHeight());
            zwischenBild = SwingFXUtils.toFXImage(gray, null);

        } catch (IOException ex) {
            System.out.println("Fehler beim Bild laden...");
        }
        return zwischenBild;
    }
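
A possible JavaFX caller for bildLaden() might look like the following; the application class, window size and the explicit native-library load are our own assumptions, not part of the original project:

import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;
import org.opencv.core.Core;
import javafx1.JavaFX1;

public class PreviewApp extends Application {

    @Override
    public void start(Stage stage) {
        Image gray = new JavaFX1().bildLaden();   // helper from the listing above
        stage.setScene(new Scene(new StackPane(new ImageView(gray)), 640, 480));
        stage.setTitle("Grayscale preview");
        stage.show();
    }

    public static void main(String[] args) {
        // bildLaden() works with OpenCV Mats, so load the native library first
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        launch(args);
    }
}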

From source file:karthik.Barcode.MatrixBarcode.java

License:Open Source License

private void calcHistograms() {
    /* Calculate the histogram by masking for angles inside each bin, thresholding to set those
       values to 1 and then creating an integral image. We can then compute the histogram for any
       size tile within the original image more efficiently than with the built-in calcHist method,
       which would have to recalculate the histogram for every tile size. (A standalone sketch of
       reading tile sums back out of such an integral image follows this method.)
    */
    Mat target;
    angles = img_details.gradient_direction.clone();
    target = img_details.temp_integral;

    for (int binRange = 1, integralIndex = 0; binRange < 181; binRange += img_details.BIN_WIDTH, integralIndex++) {
        target.setTo(ZERO_SCALAR);

        img_details.gradient_direction.copyTo(angles);
        Core.inRange(img_details.gradient_direction, scalarDict.get(binRange),
                scalarDict.get(binRange + img_details.BIN_WIDTH), mask);
        Core.bitwise_not(mask, mask);
        angles.setTo(ZERO_SCALAR, mask);

        Imgproc.threshold(angles, target, 0, 1, Imgproc.THRESH_BINARY);
        Imgproc.integral(target, target);
        target.get(0, 0, img_details.histIntegralArrays[integralIndex]);
    }

    // sanity check: something has gone wrong if the integral image does not have exactly one channel
    assert (target.channels() == 1) : "Integral does not have exactly one channel";

}
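
The comment at the top of calcHistograms explains why an integral image is used: once a bin's binary mask has been integrated, the count for any tile is just four lookups. The class below is a standalone sketch of that idea (the names IntegralTileSum and tileSum are ours, not from the project); it forces a CV_64F integral so the double[] overload of Mat.get can read the corner values directly:

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public class IntegralTileSum {

    /** Sum of a binary mask inside [x, x+w) x [y, y+h), read from its integral image. */
    static double tileSum(Mat integral, int x, int y, int w, int h) {
        double[] a = new double[1], b = new double[1], c = new double[1], d = new double[1];
        // integral(r, c) holds the sum of all mask pixels above and to the left of (r, c),
        // so the integral image is (rows + 1) x (cols + 1) and four corners give the tile sum.
        integral.get(y, x, a);
        integral.get(y, x + w, b);
        integral.get(y + h, x, c);
        integral.get(y + h, x + w, d);
        return d[0] - b[0] - c[0] + a[0];
    }

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat binary = Mat.ones(100, 100, CvType.CV_8UC1);    // stand-in for one angle bin's mask
        Mat integral = new Mat();
        Imgproc.integral(binary, integral, CvType.CV_64F);  // CV_64F sum so get(..., double[]) works
        System.out.println(tileSum(integral, 10, 10, 20, 20)); // 400.0 for an all-ones mask
    }
}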

From source file:LetsStart.utils.ImageProcessor.java

public BufferedImage toBufferedImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] buffer = new byte[bufferSize];
    matrix.get(0, 0, buffer); // get all the pixels
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, targetPixels, 0, buffer.length);
    return image;
}
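
One way such a helper might be called is sketched below; the image path, the Swing preview window and the assumption that ImageProcessor has a no-argument constructor are ours, and Imgcodecs assumes OpenCV 3.x:

import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import LetsStart.utils.ImageProcessor;

public class ShowMat {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat mat = Imgcodecs.imread("sample.png");               // loaded as BGR, CV_8UC3
        BufferedImage img = new ImageProcessor().toBufferedImage(mat);

        JFrame frame = new JFrame("Mat preview");
        frame.add(new JLabel(new ImageIcon(img)));
        frame.pack();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setVisible(true);
    }
}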

From source file:LetsStart.utils.ImageViewer.java

public Image toBufferedImage(Mat matrix) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (matrix.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }
    int bufferSize = matrix.channels() * matrix.cols() * matrix.rows();
    byte[] buffer = new byte[bufferSize];
    matrix.get(0, 0, buffer); // get all the pixels
    BufferedImage image = new BufferedImage(matrix.cols(), matrix.rows(), type);
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(buffer, 0, targetPixels, 0, buffer.length);
    return image;
}

From source file:logic.featurepointextractor.EyeIrisesFPE.java

@Override
public Point[] detect(MatContainer mc) {
    Mat eyePairMat = mc.grayEyePairMat;
    Rect eyePairRect = mc.eyePairRect;
    Rect faceRect = mc.faceRect;

    //search for eye centers
    Mat circlesMat = new Mat();
    double minDist = 2 * eyePairRect.width / 5;
    int minRad = eyePairRect.height / 5;
    int maxRad = 2 * eyePairRect.height / 3;

    Imgproc.HoughCircles(eyePairMat, circlesMat, Imgproc.CV_HOUGH_GRADIENT, 3.0, minDist, 200.0, 20.0, minRad,
            maxRad);

    float arr1[] = new float[3];
    float arr2[] = new float[3];

    if (circlesMat.size().width == 2) {
        circlesMat.get(0, 0, arr1);
        circlesMat.get(0, 1, arr2);

        float f11 = arr1[0], f12 = arr1[1], f21 = arr2[0], f22 = arr2[1];

        if (Math.abs(f11 - f21) < Parameters.irisXDifferencesThreshold * eyePairRect.width
                && Math.abs(f12 - f22) > Parameters.irisYDifferencesThreshold) {

            //find where left and right eye
            if (f11 < f21)
                //left-right
                return new Point[] {
                        new Point(f11 + faceRect.x + eyePairRect.x, f12 + faceRect.y + eyePairRect.y),
                        new Point(f21 + faceRect.x + eyePairRect.x, f22 + faceRect.y + eyePairRect.y) };
            else
                //right-left
                return new Point[] {
                        new Point(f21 + faceRect.x + eyePairRect.x, f22 + faceRect.y + eyePairRect.y),
                        new Point(f11 + faceRect.x + eyePairRect.x, f12 + faceRect.y + eyePairRect.y) };
        }
    }

    LOG.warn("Extract eye iris: FAIL");

    return null;
}
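
detect() only reads the first two circles because it is looking for an eye pair. As a more general sketch (the class and method names are ours), every circle HoughCircles found can be read out of its 1 x N, CV_32FC3 result with the float[] overload of Mat.get:

import org.opencv.core.Mat;

public class CircleDump {

    /** Prints every circle in a HoughCircles result Mat (1 x N, CV_32FC3). */
    static void dumpCircles(Mat circlesMat) {
        float[] circle = new float[3];                  // center x, center y, radius
        for (int i = 0; i < circlesMat.cols(); i++) {
            circlesMat.get(0, i, circle);
            System.out.printf("circle %d: center=(%.1f, %.1f) radius=%.1f%n",
                    i, circle[0], circle[1], circle[2]);
        }
    }
}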

From source file:Main.Camera.CameraController.java

public static BufferedImage matToBufferedImage(Mat matrix, BufferedImage bimg) {
    if (matrix != null) {
        int cols = matrix.cols();
        int rows = matrix.rows();
        int elemSize = (int) matrix.elemSize();
        byte[] data = new byte[cols * rows * elemSize];
        int type;
        matrix.get(0, 0, data);
        switch (matrix.channels()) {
        case 1:
            type = BufferedImage.TYPE_BYTE_GRAY;
            break;
        case 3:
            type = BufferedImage.TYPE_3BYTE_BGR;
            // bgr to rgb  
            byte b;
            for (int i = 0; i < data.length; i = i + 3) {
                b = data[i];
                data[i] = data[i + 2];
                data[i + 2] = b;
            }
            break;
        default:
            return null;
        }

        // Reuse existing BufferedImage if possible
        if (bimg == null || bimg.getWidth() != cols || bimg.getHeight() != rows || bimg.getType() != type) {
            bimg = new BufferedImage(cols, rows, type);
        }
        bimg.getRaster().setDataElements(0, 0, cols, rows, data);
    } else { // mat was null
        bimg = null;
    }
    return bimg;
}
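
The extra BufferedImage parameter exists so a caller can recycle the image between frames. A sketch of such a capture loop follows; the camera index, frame delay and the org.opencv.videoio package are our assumptions (OpenCV 3.x):

import java.awt.image.BufferedImage;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;
import Main.Camera.CameraController;

public class CaptureLoop {
    public static void main(String[] args) throws InterruptedException {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        VideoCapture cap = new VideoCapture(0);         // default camera
        Mat frame = new Mat();
        BufferedImage img = null;
        while (cap.read(frame)) {
            // passing the previous image back in lets matToBufferedImage reuse it
            // as long as the frame size and type have not changed
            img = CameraController.matToBufferedImage(frame, img);
            // ... hand img to the UI here ...
            Thread.sleep(33);                           // roughly 30 fps
        }
        cap.release();
    }
}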

From source file:main.PGMReader.java

public BufferedImage matToBufferedImage(Mat original) {
    // init
    BufferedImage image = null;
    int width = original.width();
    int height = original.height();
    int channels = original.channels();

    byte[] sourcePixels = new byte[width * height * channels];
    original.get(0, 0, sourcePixels);

    if (original.channels() > 1) {
        image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    } else {
        image = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
    }
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    return image;
}

From source file:main.Utils.java

public BufferedImage convertMatToImage(Mat mat) {
    int type = BufferedImage.TYPE_BYTE_GRAY;
    if (mat.channels() > 1) {
        type = BufferedImage.TYPE_3BYTE_BGR;
    }

    int bufferSize = mat.channels() * mat.cols() * mat.rows();
    byte[] bytes = new byte[bufferSize];
    mat.get(0, 0, bytes);
    BufferedImage imagem = new BufferedImage(mat.cols(), mat.rows(), type);
    byte[] targetPixels = ((DataBufferByte) imagem.getRaster().getDataBuffer()).getData();
    System.arraycopy(bytes, 0, targetPixels, 0, bytes.length);
    return imagem;
}

From source file:main.Utils.java

public BufferedImage matToBufferedImage(Mat original) {
    // init
    BufferedImage image = null;
    int width = original.width(), height = original.height(), channels = original.channels();
    byte[] sourcePixels = new byte[width * height * channels];
    original.get(0, 0, sourcePixels);

    if (original.channels() > 1) {
        image = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
    } else {
        image = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
    }
    final byte[] targetPixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    System.arraycopy(sourcePixels, 0, targetPixels, 0, sourcePixels.length);

    return image;
}

From source file:model.grayscaleClass.java

/**
 * Converts the uploaded image to grayscale and writes the result out as a JPEG.
 *
 * @param image the file name of the uploaded image
 * @param ex    the file extension of the uploaded image (unused; see the commented-out switch below)
 * @return the grayscale image file
 */
public File imagePreprocessing(String image, String ex) {
    BufferedImage bImge = null;
    BufferedImage bImage2 = null;
    File grayscle = null;

    try {

        // loadOpenCV_Lib();
        //String path = "opencv\\build\\java\\x64\\opencv_java300.dll";
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        FileInputStream fileName = new FileInputStream(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\" + image);
        InputStream input = fileName;
        bImge = ImageIO.read(input);
        byte[] imgeByte = ((DataBufferByte) bImge.getRaster().getDataBuffer()).getData();
        Mat mat1 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC3);
        mat1.put(0, 0, imgeByte);
        Mat mat2 = new Mat(bImge.getHeight(), bImge.getWidth(), CvType.CV_8UC1);
        Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGB2GRAY);
        byte[] imageData = new byte[mat2.rows() * mat2.cols() * (int) (mat2.elemSize())];
        mat2.get(0, 0, imageData);
        bImage2 = new BufferedImage(mat2.cols(), mat2.rows(), BufferedImage.TYPE_BYTE_GRAY);
        bImage2.getRaster().setDataElements(0, 0, mat2.cols(), mat2.rows(), imageData);

        String extn = null;
        /*  switch (extnsn) {
                case ".jpg":
                    extn = "jpg";
                    break;
                case ".png":
                    extn = "png";
                    break;
                case ".pdf":
                    extn = "pdf";
                    break;
                case ".tiff":
                    extn = "tif";
                    break;
            }
        */
        //writing the grayscale image to the folder
        grayscle = new File(
                "C:\\bimla\\Dev\\java\\OCRSystem\\WebContent\\uploadedImage\\grayscale" + "." + "jpg");
        ImageIO.write(bImage2, "jpg", grayscle);
    } catch (IOException ex1) {
        System.out.println("" + ex1.getMessage());
    } catch (Exception ex1) {
        Logger.getLogger(grayscaleClass.class.getName()).log(Level.SEVERE, null, ex1);
    }
    return grayscle;

}