Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

On this page you can find example usages of the org.opencv.core Mat constructor.

Prototype

public Mat(Mat m, Range rowRange, Range colRange) 

Source Link

Usage

From source file:com.wallerlab.compcellscope.MultiModeViewActivity.java

License:BSD License

/**
 * Camera-preview start callback: allocates every working Mat buffer at the
 * preview resolution and configures the camera's exposure behavior.
 *
 * @param width  preview frame width in pixels
 * @param height preview frame height in pixels
 */
public void onCameraViewStarted(int width, int height) {

    // OpenCV Mat constructor takes (rows, cols, type), so height comes first.
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    sz = new Size(width, height); //960x1280 (5) 480x800 (4)
    mIntermediateMat = new Mat(height, width, CvType.CV_16UC4);

    // Per-direction DPC (differential phase contrast) accumulation buffers.
    dpcLeft = new Mat(height, width, CvType.CV_8UC4);
    dpcRight = new Mat(height, width, CvType.CV_8UC4);
    dpcTop = new Mat(height, width, CvType.CV_8UC4);
    dpcBottom = new Mat(height, width, CvType.CV_8UC4);

    mmGrid = new Mat(height, width, CvType.CV_8UC4);

    // Display buffers for brightfield, darkfield, and the two DPC views.
    bfImg = new Mat(height, width, CvType.CV_8UC4);
    dfImg = new Mat(height, width, CvType.CV_8UC4);
    dpcLRImg = new Mat(height, width, CvType.CV_8UC4);
    dpcTBImg = new Mat(height, width, CvType.CV_8UC4);

    // Quadrants of the multi-mode grid view: top-left, top-right,
    // bottom-left, bottom-right.
    TLRect = new Rect(0, 0, width / 2, height / 2);
    TRRect = new Rect(width / 2, 0, width / 2, height / 2);
    BLRect = new Rect(0, height / 2, width / 2, height / 2);
    BRRect = new Rect(width / 2, height / 2, width / 2, height / 2);

    mCamera = mOpenCvCameraView.getCameraObject();
    //Set Exposure
    sendData("bf"); // switch the LED array to brightfield illumination
    Camera.Parameters camParams;
    camParams = mCamera.getParameters();
    camParams.setAutoExposureLock(false);
    // Give auto-exposure time to settle before parameters are applied.
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption
        // instead of silently swallowing it.
        Thread.currentThread().interrupt();
    }
    //camParams.setAutoExposureLock(true);
    mCamera.setParameters(camParams);

}

From source file:com.wallerlab.compcellscope.MultiModeViewActivity.java

License:BSD License

/**
 * Computes a differential phase contrast (DPC) image from two
 * complementary-illumination captures: (in1 - in2) / (in1 + in2), rescaled
 * to 8-bit, converted back to RGBA, and copied into {@code out} through a
 * circular mask.
 *
 * @param in1 first RGBA capture (e.g. left/top half illumination)
 * @param in2 second RGBA capture (e.g. right/bottom half illumination)
 * @param out destination Mat that receives the masked DPC result
 * @return {@code out}, for call chaining
 */
public Mat calcDPC(Mat in1, Mat in2, Mat out) {
    // Mat(rows, cols, type): rows() must come first. The original code
    // passed (width, height), transposing every allocation; copyTo and the
    // arithmetic ops only hid the bug by reallocating their destinations.
    Mat Mat1 = new Mat(in1.rows(), in1.cols(), in1.type());
    Mat Mat2 = new Mat(in2.rows(), in2.cols(), in2.type());
    in1.copyTo(Mat1);
    in2.copyTo(Mat2);

    // Work in single-channel grayscale.
    Imgproc.cvtColor(Mat1, Mat1, Imgproc.COLOR_RGBA2GRAY, 1);
    Imgproc.cvtColor(Mat2, Mat2, Imgproc.COLOR_RGBA2GRAY, 1);

    Mat output = new Mat(Mat1.rows(), Mat1.cols(), CvType.CV_8UC4);
    Mat dpcSum = new Mat(Mat1.rows(), Mat1.cols(), CvType.CV_32FC1);
    Mat dpcDifference = new Mat(Mat1.rows(), Mat1.cols(), CvType.CV_32FC1);
    Mat dpcImgF = new Mat(Mat1.rows(), Mat1.cols(), CvType.CV_32FC1);

    // Convert to floats so the division below is not integer-truncated.
    Mat1.convertTo(Mat1, CvType.CV_32FC1);
    Mat2.convertTo(Mat2, CvType.CV_32FC1);
    Core.add(Mat1, Mat2, dpcSum);
    Core.subtract(Mat1, Mat2, dpcDifference);
    // DPC ratio is in [-1, 1]; shift to [0, 2] then scale to [0, 220]
    // (factor 110 leaves headroom below the 8-bit ceiling of 255).
    Core.divide(dpcDifference, dpcSum, dpcImgF);
    Core.add(dpcImgF, new Scalar(1.0), dpcImgF);
    Core.multiply(dpcImgF, new Scalar(110), dpcImgF);
    dpcImgF.convertTo(output, CvType.CV_8UC1);
    Imgproc.cvtColor(output, output, Imgproc.COLOR_GRAY2RGBA, 4);

    // Release intermediates eagerly — OpenCV Mats hold native memory.
    dpcSum.release();
    dpcDifference.release();
    dpcImgF.release();
    Mat1.release();
    Mat2.release();

    // Copy the result through a filled circular mask slightly larger than
    // half the image width, blanking the corners.
    Mat maskedImg = Mat.zeros(output.rows(), output.cols(), CvType.CV_8UC4);
    int radius = maskedImg.width() / 2 + 25;
    Core.circle(maskedImg, new Point(maskedImg.width() / 2, maskedImg.height() / 2), radius,
            new Scalar(255, 255, 255), -1, 8, 0);
    output.copyTo(out, maskedImg);
    output.release();
    maskedImg.release();
    return out;
}

From source file:com.wallerlab.processing.tasks.ComputeRefocusTask.java

License:BSD License

/**
 * Digitally refocuses the captured image stack at depth {@code z}.
 *
 * Each stored frame is cropped, shifted by an amount proportional to z and
 * the tangent of its illumination angle, then accumulated into three
 * running sums: the full brightfield refocus, a left-minus-right DPC sum,
 * and a top-minus-bottom DPC sum. The three sums are rescaled to 8-bit and
 * returned as bitmaps.
 *
 * @param z refocus depth (units set by tanh_lit/tanv_lit calibration)
 * @return bitmaps {refocused brightfield, LR DPC, TB DPC}
 */
private Bitmap[] computeFocus(float z) {
    // Dimensions after symmetric cropping on both axes.
    int width = mDataset.WIDTH - 2 * mDataset.XCROP;
    int height = mDataset.HEIGHT - 2 * mDataset.YCROP;

    // 32-bit float accumulators and their 8-bit display counterparts.
    Mat result = new Mat(height, width, CvType.CV_32FC4);
    Mat result8 = new Mat(height, width, CvType.CV_8UC4);

    Mat dpc_result_tb = new Mat(height, width, CvType.CV_32FC4);
    Mat dpc_result_tb8 = new Mat(height, width, CvType.CV_8UC4);

    Mat dpc_result_lr = new Mat(height, width, CvType.CV_32FC4);
    Mat dpc_result_lr8 = new Mat(height, width, CvType.CV_8UC4);

    Mat img; // decoded source frame, reused each iteration
    Mat img32 = new Mat(height, width, CvType.CV_32FC4);
    Mat shifted; // frame after the per-hole circular shift

    for (int idx = 0; idx < mDataset.fileCount; idx++) {
        // Decode the JPEG bytes and crop to the working region.
        img = ImageUtils.toMat(BitmapFactory.decodeByteArray(fileByteList[idx], 0, fileByteList[idx].length));
        img = img.submat(mDataset.YCROP, mDataset.HEIGHT - mDataset.YCROP, mDataset.XCROP,
                mDataset.WIDTH - mDataset.XCROP);
        img.convertTo(img32, result.type());

        // Grab actual hole number from filename
        // (expected pattern: ..._scanning_<holeNum>.jpeg)
        String fName = mDataset.fileList[idx].toString();
        String hNum = fName.substring(fName.indexOf("_scanning_") + 10, fName.indexOf(".jpeg"));
        int holeNum = Integer.parseInt(hNum);
        //Log.d(TAG,String.format("BF Scan Header is: %s", hNum));

        // Calculate these based on array coordinates
        // Shift scales with z and the illumination angle of this LED hole.
        int xShift = (int) Math.round(z * tanh_lit[holeNum]);
        int yShift = (int) Math.round(z * tanv_lit[holeNum]);

        shifted = ImageUtils.circularShift(img32, yShift, xShift);

        if (mDataset.leftList.contains(holeNum)) //add LHS
        {
            Core.add(dpc_result_lr, shifted, dpc_result_lr);
        } else //subtract RHS
        {
            Core.subtract(dpc_result_lr, shifted, dpc_result_lr);
        }

        if (mDataset.topList.contains(holeNum)) //add Top
        {
            Core.add(dpc_result_tb, shifted, dpc_result_tb);
        } else //subtract bottom
        {
            Core.subtract(dpc_result_tb, shifted, dpc_result_tb);
        }

        // Brightfield refocus accumulates every frame unconditionally.
        Core.add(result, shifted, result);

        float progress = ((idx + 1) / (float) mDataset.fileCount);
        onProgressUpdate((int) (progress * 100), -1);
        Log.d(TAG, String.format("progress: %f", progress));
    }

    // Scale brightfield by its max so the brightest pixel maps to 255.
    Core.MinMaxLocResult minMaxLocResult1 = Core.minMaxLoc(result.reshape(1));
    result.convertTo(result8, CvType.CV_8UC4, 255 / minMaxLocResult1.maxVal);

    // DPC sums can be negative: map [min, max] linearly onto [0, 255].
    Core.MinMaxLocResult minMaxLocResult2 = Core.minMaxLoc(dpc_result_lr.reshape(1));
    dpc_result_lr.convertTo(dpc_result_lr8, CvType.CV_8UC4,
            255 / (minMaxLocResult2.maxVal - minMaxLocResult2.minVal),
            -minMaxLocResult2.minVal * 255.0 / (minMaxLocResult2.maxVal - minMaxLocResult2.minVal));

    Core.MinMaxLocResult minMaxLocResult3 = Core.minMaxLoc(dpc_result_tb.reshape(1));
    dpc_result_tb.convertTo(dpc_result_tb8, CvType.CV_8UC4,
            255 / (minMaxLocResult3.maxVal - minMaxLocResult3.minVal),
            -minMaxLocResult3.minVal * 255.0 / (minMaxLocResult3.maxVal - minMaxLocResult3.minVal));

    /*
    Log.d(TAG,String.format("result_min: %f, max: %f",minMaxLocResult1.minVal,minMaxLocResult1.maxVal));
    Log.d(TAG,String.format("lr_min: %f, max: %f",minMaxLocResult2.minVal,minMaxLocResult2.maxVal));
    Log.d(TAG,String.format("tb_min: %f, max: %f",minMaxLocResult3.minVal,minMaxLocResult3.maxVal));
    */

    // remove transparency in DPC images
    // (forces the alpha channel to 255 while leaving RGB untouched)
    Scalar alphaMask = new Scalar(new double[] { 1.0, 1.0, 1.0, 255.0 });

    Core.multiply(dpc_result_lr8, alphaMask, dpc_result_lr8);
    Core.multiply(dpc_result_tb8, alphaMask, dpc_result_tb8);

    if (!mDataset.USE_COLOR_DPC) {
        Imgproc.cvtColor(dpc_result_lr8, dpc_result_lr8, Imgproc.COLOR_BGR2GRAY);
        Imgproc.cvtColor(dpc_result_tb8, dpc_result_tb8, Imgproc.COLOR_BGR2GRAY);
    }

    /*
    // Cut off edges in DPC images
    Point centerPt = new Point();
    centerPt.x = Math.round((float)width/2.0);
    centerPt.y = Math.round((float)height/2.0);
    Mat circleMat = new Mat(dpc_result_lr8.size(), dpc_result_lr8.type());
    Scalar color = new Scalar(255);
    Core.circle(circleMat, centerPt, 200, color);
    //Core.bitwise_and(circleMat, dpc_result_lr8, dpc_result_lr8);
    //Core.bitwise_and(circleMat, dpc_result_tb8, dpc_result_tb8);
    * 
    * 
    */

    // Order matters to callers: [0]=brightfield, [1]=LR DPC, [2]=TB DPC.
    Bitmap[] outputBitmaps = new Bitmap[3];
    outputBitmaps[0] = ImageUtils.toBitmap(result8);
    outputBitmaps[1] = ImageUtils.toBitmap(dpc_result_lr8);
    outputBitmaps[2] = ImageUtils.toBitmap(dpc_result_tb8);

    return outputBitmaps;
}

From source file:contador_de_moedas.Circulo.java

/** Allocates the output buffer with the same dimensions as imageB (8-bit, 3-channel). */
private void IniciaImag() {
    // Mat(Size, type) reads rows from size.height and cols from size.width,
    // equivalent to Mat(imageB.height(), imageB.width(), type).
    this.output = new Mat(imageB.size(), CvType.CV_8UC3);
}

From source file:contador_de_moedas.Circulo.java

/**
 * Loads imgFB from disk into the imageB Mat by copying the raw raster bytes.
 * On I/O failure the error is logged and imageB is left unassigned.
 */
private void LoadImagB() {
    try {
        BufferedImage buffered = ImageIO.read(imgFB);
        // Grab the backing byte array of the decoded raster directly.
        byte[] pixels = ((DataBufferByte) buffered.getRaster().getDataBuffer()).getData();
        // NOTE(review): assumes the decoded image is 3 bytes/pixel (BGR);
        // a grayscale or alpha-carrying source would not match CV_8UC3 — confirm.
        imageB = new Mat(buffered.getHeight(), buffered.getWidth(), CvType.CV_8UC3);
        imageB.put(0, 0, pixels);
    } catch (IOException ex) {
        Logger.getLogger(Circulo.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:contador_de_moedas.Circulo.java

/**
 * Loads imgFA from disk into the imageA Mat by copying the raw raster bytes.
 * On I/O failure the error is logged and imageA is left unassigned.
 */
private void LoadImagA() {
    try {
        BufferedImage buffered = ImageIO.read(imgFA);
        // Grab the backing byte array of the decoded raster directly.
        byte[] pixels = ((DataBufferByte) buffered.getRaster().getDataBuffer()).getData();
        // NOTE(review): assumes the decoded image is 3 bytes/pixel (BGR);
        // a grayscale or alpha-carrying source would not match CV_8UC3 — confirm.
        imageA = new Mat(buffered.getHeight(), buffered.getWidth(), CvType.CV_8UC3);
        imageA.put(0, 0, pixels);
    } catch (IOException ex) {
        Logger.getLogger(Circulo.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:contador_de_moedas.Segmentacao.java

/** Allocates the output buffer with the same dimensions as image (8-bit, 3-channel). */
private void IniciaImag() {
    // Mat takes (rows, cols, type); the original passed (width, height),
    // transposing the buffer and disagreeing with Circulo.IniciaImag,
    // which correctly passes height first.
    this.output = new Mat(image.height(), image.width(), CvType.CV_8UC3);
}

From source file:contador_de_moedas.Segmentacao.java

/**
 * Loads imgF from disk, copies its raster bytes into the image Mat, and
 * immediately binarizes it via Binarizar. On I/O failure the error is
 * logged and image is left unassigned.
 */
private void LoadImagA() {
    try {
        BufferedImage decoded = ImageIO.read(imgF);
        // Backing byte array of the decoded raster, copied wholesale into the Mat.
        byte[] pixels = ((DataBufferByte) decoded.getRaster().getDataBuffer()).getData();
        // NOTE(review): assumes a 3-bytes/pixel (BGR) source to match CV_8UC3 — confirm.
        image = new Mat(decoded.getHeight(), decoded.getWidth(), CvType.CV_8UC3);
        image.put(0, 0, pixels);
        image = Binarizar(image);
    } catch (IOException ex) {
        Logger.getLogger(Segmentacao.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:controller.ToneMapping.java

/**
 * Applies Reinhard tone mapping: computes the luminance average, tone-maps
 * the radiance map, and packs the result into an h-by-w Mat with channels
 * written in BGR order.
 *
 * @return the tone-mapped image Mat
 */
public Mat reinhardTMO() {
    getLuminance();
    double averageLum = calcLumAvg();
    RadianceMap mapped = toneMapped(averageLum);

    // 16 is the integer value of CvType.CV_8UC3 (8-bit, 3-channel).
    Mat hdri = new Mat(h, w, 16);
    double[] rgb = new double[3];
    for (int row = 0; row < h; row++) {
        for (int col = 0; col < w; col++) {
            for (int channel = 0; channel < 3; channel++) {
                rgb[channel] = mapped.getrMap_pixel(channel, row, col);
            }
            // Mat.put expects BGR order, so the channels are reversed here.
            hdri.put(row, col, rgb[2], rgb[1], rgb[0]);
        }
    }
    return hdri;
}

From source file:cpsd.ImageGUI.java

/**
 * Canny-button handler: binarizes the currently loaded image, runs Canny
 * edge detection on the result, stores it back, and refreshes the display.
 * If no image is loaded, an error message is printed instead.
 */
private void cannyButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cannyButtonActionPerformed
    Mat source = ImageClass.getInstance().getImage();
    if (source == null) {
        // Explicit null check replaces the original catch(NullPointerException),
        // which used an exception for ordinary "no image loaded" control flow.
        System.err.println("..........Please load a valid Image..........");
    } else {
        Mat destination = new Mat(source.rows(), source.cols(), source.type());
        threshold(source, destination, 50, 255, CV_THRESH_BINARY);
        // NOTE(review): thresholds 0.05/0.15 are unusually low for an 8-bit
        // image (typical values are tens to hundreds) — confirm intent.
        Imgproc.Canny(destination, destination, 0.05, 0.15, 3, true);
        ImageClass.getInstance().setImage(destination);
    }
    displayImage(); // refresh regardless of whether processing ran, as before
}