Example usage for org.opencv.core Mat convertTo

Introduction

On this page you can find example usages of org.opencv.core Mat convertTo.

Prototype

public void convertTo(Mat m, int rtype, double alpha, double beta) 
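
For each element, convertTo computes dst(x,y) = saturate_cast<rtype>(alpha * src(x,y) + beta); a negative rtype (commonly -1) keeps the destination at the same depth as the source. A minimal sketch, assuming the OpenCV native library is already loaded and using illustrative sizes and values:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

// illustrative 2x2 8-bit grey image, every pixel set to 100
Mat src = new Mat(2, 2, CvType.CV_8UC1, new Scalar(100));

// keep the source depth (rtype = -1): dst = saturate_cast<uchar>(1.5 * 100 + 20) = 170
Mat dst = new Mat();
src.convertTo(dst, -1, 1.5, 20);

// widen to 32-bit float while rescaling to the [0, 1] range
Mat dstFloat = new Mat();
src.convertTo(dstFloat, CvType.CV_32F, 1.0 / 255.0, 0.0);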

Usage

From source file: cpsd.ImageGUI.java

private void enhanceBrightness(double alpha) {
    Mat source = ImageClass.getInstance().getImage();
    Mat destination = new Mat(source.rows(), source.cols(), source.type());
    source.convertTo(destination, -1, alpha, 50);
    ImageClass.getInstance().setImage(destination);
    // brightSlider.setValue((int)(alpha*10));
}
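
With rtype = -1 the destination keeps the source depth, so every channel value becomes saturate_cast(alpha * src + 50): alpha scales the contrast, and the fixed offset of 50 brightens the result.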

From source file: cx.uni.jk.mms.iaip.filter.GreyAutoContrastBrightness.java

License: Open Source License

@Override
public Mat convert(Mat mat) {
    /** find contrast and brightness to fit into 8 bit */
    MinMaxLocResult mmlr = Core.minMaxLoc(mat);
    double min = mmlr.minVal; // Math.min(mmlr.minVal, 0);
    double max = mmlr.maxVal; // Math.max(mmlr.maxVal, 255);
    double alpha = 256.0d / (max - min);
    double beta = -min * alpha;

    /** conversion to 8 bit Mat */
    Mat byteMat = new MatOfByte();
    mat.convertTo(byteMat, CvType.CV_8U, alpha, beta);

    return byteMat;
}
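
An alternative (not used by the original filter) is to let Core.normalize compute the same min/max stretch internally; a sketch, assuming mat is a single-channel input:

Mat byteMat = new Mat();
Core.normalize(mat, byteMat, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);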

From source file: cx.uni.jk.mms.iaip.filter.LogOfOnePlusAbs.java

License: Open Source License

@Override
public Mat convert(Mat mat) {

    /** make absolute values and log */
    Mat tempMat = mat.clone();
    Core.absdiff(tempMat, new Scalar(0.0d), tempMat);
    Core.add(tempMat, new Scalar(1.0d), tempMat);
    Core.log(tempMat, tempMat);

    /** find contrast and brightness to fit into 8 bit */
    MinMaxLocResult mmlr = Core.minMaxLoc(tempMat);
    double min = Math.min(mmlr.minVal, 0);
    double max = mmlr.maxVal;
    double alpha = 256.0d / (max - min);
    double beta = -min * alpha;

    /** conversion to 8 bit Mat applying contrast alpha and brightness beta */
    Mat byteMat = new MatOfByte();
    tempMat.convertTo(byteMat, CvType.CV_8U, alpha, beta);

    return byteMat;
}

From source file: cx.uni.jk.mms.iaip.filter.LogRedBlue.java

License: Open Source License

@Override
public Mat convert(Mat mat) {

    MinMaxLocResult negativeMmlr, positiveMmlr;
    double min, max, alpha, beta;

    /** negative values to positive and log */
    Mat negativeMat = mat.clone();
    Core.min(negativeMat, new Scalar(0.0d), negativeMat);
    Core.multiply(negativeMat, new Scalar(-1.0d), negativeMat);
    Core.add(negativeMat, new Scalar(1.0d), negativeMat);
    Core.log(negativeMat, negativeMat);

    /** positive values log */
    Mat positiveMat = mat.clone();
    Core.max(positiveMat, new Scalar(0.0d), positiveMat);
    Core.add(positiveMat, new Scalar(1.0d), positiveMat);
    Core.log(positiveMat, positiveMat);

    /** find common contrast and brightness to fit into 8 bit */
    negativeMmlr = Core.minMaxLoc(negativeMat);
    positiveMmlr = Core.minMaxLoc(positiveMat);
    min = 0;
    max = Math.max(negativeMmlr.maxVal, positiveMmlr.maxVal);
    alpha = 256.0d / (max - min);
    beta = -min * alpha;

    /** conversion of both matrices to 8 bit */
    negativeMat.convertTo(negativeMat, CvType.CV_8UC1, alpha, beta);
    positiveMat.convertTo(positiveMat, CvType.CV_8UC1, alpha, beta);

    /** combine both matrices into one 8 bit 3 channel rgb picture */
    Mat tempMat = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC3);
    List<Mat> mixSrcMats = new ArrayList<>();
    mixSrcMats.add(negativeMat); // 1 channel: 0
    mixSrcMats.add(positiveMat); // 1 channel: 1
    List<Mat> mixDstMats = new ArrayList<>();
    mixDstMats.add(tempMat); // 3 channels: 0-2
    MatOfInt fromToMat = new MatOfInt(0, 0 /* neg -> red */, -1, 1 /* null -> green */, 1, 2 /* pos -> blue */);
    Core.mixChannels(mixSrcMats, mixDstMats, fromToMat);

    return tempMat;
}
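
A note on the fromTo array: Core.mixChannels reads it as (source channel, destination channel) index pairs, and a negative source index fills the corresponding destination channel with zeros, so the mapping above puts the log of the negative values into red, leaves green empty, and puts the log of the positive values into blue.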

From source file: cx.uni.jk.mms.iaip.filter.LogYellowCyan.java

License: Open Source License

@Override
public Mat convert(Mat mat) {

    MinMaxLocResult negativeMmlr, positiveMmlr;
    double min, max, alpha, beta;

    /** negative values to positive and log */
    Mat negativeMat = mat.clone();
    Core.min(negativeMat, new Scalar(0.0d), negativeMat);
    Core.multiply(negativeMat, new Scalar(-1.0d), negativeMat);
    Core.add(negativeMat, new Scalar(1.0d), negativeMat);
    Core.log(negativeMat, negativeMat);

    /** positive values log */
    Mat positiveMat = mat.clone();
    Core.max(positiveMat, new Scalar(0.0d), positiveMat);
    Core.add(positiveMat, new Scalar(1.0d), positiveMat);
    Core.log(positiveMat, positiveMat);

    /** find common contrast and brightness to fit into 8 bit */
    negativeMmlr = Core.minMaxLoc(negativeMat);
    positiveMmlr = Core.minMaxLoc(positiveMat);
    min = 0;
    max = Math.max(negativeMmlr.maxVal, positiveMmlr.maxVal);
    alpha = 256.0d / (max - min);
    beta = -min * alpha;

    /** conversion of both matrices to 8 bit */
    negativeMat.convertTo(negativeMat, CvType.CV_8UC1, alpha, beta);
    positiveMat.convertTo(positiveMat, CvType.CV_8UC1, alpha, beta);

    /** create additional mat for saturated green */
    Mat brightMat = negativeMat.clone();
    Core.max(negativeMat, positiveMat, brightMat);
    // Core.absdiff(brightMat, new Scalar(255.0d), brightMat);
    // Core.multiply(brightMat, new Scalar(1.0d/3.0d), brightMat);

    /** combine all matrices into one 8 bit 3 channel rgb picture */
    Mat tempMat = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC3);
    List<Mat> mixSrcMats = new ArrayList<>();
    mixSrcMats.add(negativeMat); // 1 channel: 0
    mixSrcMats.add(positiveMat); // 1 channel: 1
    mixSrcMats.add(brightMat); // 1 channel: 2
    List<Mat> mixDstMats = new ArrayList<>();
    mixDstMats.add(tempMat); // 3 channels: 0-2
    MatOfInt fromToMat = new MatOfInt(0, 0 /* neg -> red */, 2, 1 /* bright -> green */, 1, 2 /* pos -> blue */);
    Core.mixChannels(mixSrcMats, mixDstMats, fromToMat);

    return tempMat;
}

From source file: de.hftl_projekt.ict.MainActivity.java

/**
 * Changes the brightness of the input matrix (image) using the given alpha/beta values.
 * @param image input matrix
 * @param alpha gain (contrast) value
 * @param beta bias (brightness offset) value
 * @return modified input matrix
 */
public Mat changeBrightness(Mat image, double alpha, double beta) {
    // use the native OpenCV function to convert the image with the specified alpha and beta values;
    // rtype (second parameter) defines the type of the output matrix:
    // if the value is below 0, the output matrix has the same type as the input matrix
    image.convertTo(image, -1, alpha, beta);
    return image;
}
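
A minimal call-site sketch (the matrix and the 1.2/30.0 values are illustrative, not taken from the original source; note that the method converts in place and returns the same Mat):

// hypothetical 8-bit colour frame filled with the value 64
Mat frame = new Mat(480, 640, CvType.CV_8UC3, new Scalar(64, 64, 64));

// gain 1.2, bias +30: every channel value becomes about 1.2 * 64 + 30 = 107 (rounded, saturated to 0-255)
Mat adjusted = changeBrightness(frame, 1.2, 30.0);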

From source file: fi.conf.tabare.ARDataProvider.java

private void detect() {

    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;

    //List<Mat> channels = new LinkedList<>();

    //Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);

                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);

                //Imgproc.equalizeHist(input_image, input_image);

                input_image.convertTo(input_image, -1, params.contrast, params.brightness); //image*contrast[1.0-3.0] + brightness[0-255]

                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {

                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image == null)
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);

                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();

                }

                //               if(background == null) background = input_image.clone();         
                //               if(recaptureBg){
                //                  backgSubstractor.apply(background, background);
                //                  System.out.println(background.channels() + " " + background.size() );
                //                  System.out.println(input_image.channels() + " " + input_image.size() );
                //                  recaptureBg = false;
                //               }
                //               if(dynamicBGRemoval){
                //                  //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //                  //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //                  //Core.subtract(input_image, background, input_image);
                //                  //Core.bitwise_xor(input_image, background, input_image);
                //
                //                  doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get a nicer result when subtracting
                //                  backgSubstractor.apply(background, background, dynamicBGAmount);
                //               }
                //               if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;

                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;

                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {

                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, 5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV
                                            : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {

                        double[] coords = circles.get(0, i);

                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; //If the circle is off the limits, or too small, don't process it.

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);

                        if (tc.isValid())
                            tripcodeData.add(tc);

                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();

                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }

            } else {
                System.out.println("frame/cam failiure!");
            }

        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        //FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }

    }

    //De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}

From source file: tk.year.opencv.demo.filters.Contrast.java

License: Open Source License

@Override
public Mat filter(final Mat src) {

    final Mat dst = new Mat(src.rows(), src.cols(), src.type());
    src.convertTo(dst, -1, 10d * value / 100, 0);
    return dst;
}
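
Because rtype is -1 and beta is 0, the filter is a pure gain: with value = 100 the scale factor is 10.0, with value = 10 it is 1.0 (no change), and smaller values darken the image.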

From source file: us.cboyd.android.dicom.DcmInfoFragment.java

License: Open Source License

public void updateDicomInfo() {
    mDicomObject = null;
    if ((mCurrDir != null) && (mFileList != null) && (mPosition >= 0) && (mPosition < mFileList.size())) {
        try {
            // Read in the DicomObject
            DicomInputStream dis = new DicomInputStream(new FileInputStream(getDicomFile()));
            //mDicomObject = dis.readFileMetaInformation();
            mDicomObject = dis.readDicomObject();
            dis.close();

            // Get the SOP Class element
            DicomElement de = mDicomObject.get(Tag.MediaStorageSOPClassUID);
            String SOPClass = "";
            if (de != null)
                SOPClass = de.getString(new SpecificCharacterSet(""), true);
            else
                SOPClass = "null";
            Log.i("cpb", "SOP Class: " + SOPClass);

            // TODO: DICOMDIR support
            if (SOPClass.equals(UID.MediaStorageDirectoryStorage)) {
                showImage(false);
                mErrText.setText(mRes.getString(R.string.err_dicomdir));
            } else {
                showImage(true);
                int rows = mDicomObject.getInt(Tag.Rows);
                int cols = mDicomObject.getInt(Tag.Columns);
                Mat temp = new Mat(rows, cols, CvType.CV_32S);
                temp.put(0, 0, mDicomObject.getInts(Tag.PixelData));
                // [Y, X] or [row, column]
                double[] spacing = mDicomObject.getDoubles(Tag.PixelSpacing);
                double scaleY2X = spacing[1] / spacing[0];

                // Determine the minmax
                Core.MinMaxLocResult minmax = Core.minMaxLoc(temp);
                double diff = minmax.maxVal - minmax.minVal;
                temp.convertTo(temp, CvType.CV_8UC1, 255.0d / diff, 0);

                // Set the image
                Bitmap imageBitmap = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888);
                Log.w("cpb", "test3");
                Utils.matToBitmap(temp, imageBitmap, true);
                Log.w("cpb", "test4");
                mImageView.setImageBitmap(imageBitmap);
                mImageView.setScaleX((float) scaleY2X);
            }

            // TODO: Add selector for info tag listing
            mTags = mRes.getStringArray(R.array.dcmtag_default);
            refreshTagList();

        } catch (Exception ex) {
            showImage(false);
            mErrText.setText(mRes.getString(R.string.err_file_read) + mFileList.get(mPosition) + "\n\n"
                    + ex.getMessage());
        }
    } else {
        showImage(false);
        mErrText.setText(mRes.getString(R.string.err_unknown_state));
    }
}
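
Note that the conversion above passes beta = 0, which implicitly assumes minmax.minVal is zero; for pixel data with a non-zero minimum, the full min/max stretch (a sketch, not from the original source) would also shift by the minimum:

double diff = minmax.maxVal - minmax.minVal;
temp.convertTo(temp, CvType.CV_8UC1, 255.0d / diff, -minmax.minVal * 255.0d / diff);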

From source file: us.cboyd.android.shared.ImageContrastView.java

License: Open Source License

public void setImageContrastCV(double brightness, double contrast, int colormap, boolean inv) {
    double diff = getWidth();
    double ImWidth = (1 - (contrast / 100.0d)) * diff;
    double alpha = 255.0d / ImWidth;
    double beta = alpha * (-mMin);
    mLevel = ImWidth / 2.0d + (diff - ImWidth) * (1.0d - (brightness / 100.0d));
    mMax = ImWidth + (diff - ImWidth) * (1.0d - (brightness / 100.0d));
    mMin = (diff - ImWidth) * (1.0d - (brightness / 100.0d));

    int i = 0;
    int n = (int) diff;
    Mat cmap = new Mat(1, n, CvType.CV_32S);
    for (i = 0; i < n; i++) {
        cmap.put(0, i, i);
    }
    if (inv) {
        alpha *= -1.0d;
        beta = 255.0d - beta;
    }
    cmap.convertTo(cmap, CvType.CV_8UC1, alpha, beta);
    if (colormap >= 0) {
        Contrib.applyColorMap(cmap, cmap, colormap);
        //applyColorMap returns a BGR image, but createBitmap expects RGB
        //do a conversion to swap blue and red channels:
        Imgproc.cvtColor(cmap, cmap, Imgproc.COLOR_RGB2BGR);
    }
    Bitmap cmapBitmap = Bitmap.createBitmap(n, 1, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(cmap, cmapBitmap, false);
    setImageBitmap(cmapBitmap);
}