Example usage for org.opencv.core Mat clone

List of usage examples for org.opencv.core Mat clone

Introduction

On this page you can find example usages of org.opencv.core Mat clone.

Prototype

public Mat clone() 
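
Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) illustrating the key property of clone(): it returns a deep copy, so writing to the clone leaves the source Mat untouched. The native-library loading line may differ per setup.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class CloneDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library (library name/path may differ per setup).
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // 3x3 single-channel matrix filled with zeros.
        Mat original = new Mat(3, 3, CvType.CV_8UC1, new Scalar(0));

        // clone() allocates new memory and copies the pixel data.
        Mat copy = original.clone();

        // Writing into the copy does not affect the original.
        copy.setTo(new Scalar(255));

        System.out.println("original(0,0) = " + original.get(0, 0)[0]); // 0.0
        System.out.println("copy(0,0)     = " + copy.get(0, 0)[0]);     // 255.0
    }
}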

Usage

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Runs the main loop; reaching the penultimate frame means
 * we have reached the end of the video.
 */
public void processVideo() {
    do {
        Mat tmp = new Mat();
        video.read(tmp);
        if (!tmp.empty()) {
            frame = tmp.clone();
            tmp.release();
            if (frameCounter < (getFrameCount() / 2) - 1) {
                frameCounter++;
                if (getMinutes() > 0) {
                    carsPerMinute = getDetectedCarsCount() / getMinutes();
                }

                processFrame(getFrame());
            } else {
                frameCounter = 0;
                finished = true;

                logger.trace("Restarting..");
                setFramePos(1);
            }
        } else {
            logger.warn("Empty image!");
            frameCounter = 0;
            finished = true;

            logger.trace("Restarting..");
            setFramePos(1);
        }
    } while (frameCounter > (getFrameCount() / 2) - 2);
}

From source file:info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License:Open Source License

private Mat threshold_mask(Mat main_region) {
    Mat result = main_region.clone();

    Imgproc.threshold(main_region, result, Core.mean(main_region).val[0], 255, Imgproc.THRESH_TOZERO);

    return result;
}

From source file:info.jmfavreau.bifrostcore.imageprocessing.ImageToColor.java

License:Open Source License

private Mat smooth_image(Mat img) {
    Mat smoothed = img.clone();
    int diam = 20; // TODO: adjust this value depending on the image size
    int std_dev_color = 80; // TODO: fix these values
    int std_dev_space = 80;
    try {
        Imgproc.bilateralFilter(img, smoothed, diam, std_dev_color, std_dev_space);
    } catch (CvException e) {
        Log.e("bifrostcore", e.getMessage());
    }
    return smoothed;
}

From source file:javaapplication1.Ocv.java

public void blendWithGray50(String input, String output) {
    // load the image and read it into a matrix
    Mat image = Highgui.imread(input);

    // clone the image, and convert it to grayscale
    Mat gray = image.clone();
    Imgproc.cvtColor(gray, gray, Imgproc.COLOR_BGR2GRAY, 1);
    Imgproc.cvtColor(gray, gray, Imgproc.COLOR_GRAY2BGR, 3);

    // blend the two images (equal weight) into a new matrix and save it
    Mat dst = new Mat();
    Core.addWeighted(image, .5f, gray, .5f, 0.0, dst);
    Highgui.imwrite(output, dst);
}

From source file:LetsStart.GUI.java

public GUI(String windowName, Mat newImage) {
    super();
    this.windowName = windowName;
    this.image = newImage;
    //this.a = newImage;  //new Mat (100,100, CvType.CV_16UC3 ,new Scalar (new double[]{1,1,1}));
    originalImage = newImage.clone();
    color = new Scalar(10, 10);
    processOperation();
    updateView();
}

From source file:mineshcvit.opendocscanner.CropImage.java

License:Apache License

private void makeDefault() {

    // minesh: finding the largest rect in the given image

    //Mat grayImage= Imgcodecs.imread(IMAGE_PATH, Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);


    Mat imgSource = new Mat();

    Utils.bitmapToMat(mBitmap, imgSource);
    //  Utils.bitmapToMat(bmp32, imgMAT);

    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    //Mat imgSource = Imgcodecs.imread(mImagePath,Imgcodecs.CV_LOAD_IMAGE_GRAYSCALE);
    Log.w("myApp", "image path from isnde makedefault() is " + mImagePath);

    int matwidth = imgSource.width();
    int matheight = imgSource.height();

    Log.w("myApp", "mat image width, from makedefault() is " + matwidth);
    Log.w("myApp", "mat image height from, makedefault() is " + matheight);

    Mat imageBin = new Mat();

    double threshold = Imgproc.threshold(imgSource, imageBin, 0, 255, Imgproc.THRESH_OTSU);
    Log.w("myApp", "otsu threshold is " + threshold);

    // for Canny, the higher threshold is Otsu's threshold and the lower threshold is half of it
    Imgproc.Canny(imgSource.clone(), imgSource, threshold * 0.5, threshold);

    // Imgcodecs.imwrite(mImagePath, imgSource);

    // int canny_height=imgSource.height();
    //   int canny_width=imgSource.width();

    // Log.w("myApp", "canny image height is "+canny_height);

    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(3, 3), 3);
    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    //MatVector contours = new MatVector();

    Imgproc.findContours(imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    MatOfPoint temp_contour = contours.get(0); // start with the first contour as the current candidate
    MatOfPoint2f approxCurve = new MatOfPoint2f();

    for (int idx = 0; idx < contours.size(); idx++) {
        temp_contour = contours.get(idx);
        double contourarea = Imgproc.contourArea(temp_contour);
        // compare this contour to the previous largest contour found
        if (contourarea > maxArea) {
            // check if this contour is a square
            MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
            int contourSize = (int) temp_contour.total();
            MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
            Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
            if (approxCurve_temp.total() == 4) {
                maxArea = contourarea;
                approxCurve = approxCurve_temp;
            }
        }
    }
    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p1,55,new Scalar(0,0,255));
    // Imgproc.warpAffine(sourceImage, dummy, rotImage,sourceImage.size());
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p2,150,new Scalar(255,255,255));
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p3,200,new Scalar(255,0,0));
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p4,100,new Scalar(0,0,255));
    ArrayList<Point> source = new ArrayList<Point>();
    ArrayList<Point> topPoints = new ArrayList<Point>();
    ArrayList<Point> bottomPoints = new ArrayList<Point>();
    ArrayList<Point> sortedPoints = new ArrayList<Point>();

    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);

    Collections.sort(source, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.y, o2.y);
        }
    });

    topPoints.add(source.get(0));
    topPoints.add(source.get(1));

    Collections.sort(topPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    bottomPoints.add(source.get(2));
    bottomPoints.add(source.get(3));

    Collections.sort(bottomPoints, new Comparator<Point>() {

        public int compare(Point o1, Point o2) {
            return Double.compare(o1.x, o2.x);
        }
    });

    sortedPoints.add(topPoints.get(0));//top left
    sortedPoints.add(bottomPoints.get(0));//bottom left
    sortedPoints.add(bottomPoints.get(1));//bottom right
    sortedPoints.add(topPoints.get(1));//top right

    /*
    c++ code to sort the points
            
    void sortCorners(std::vector<cv::Point2f>& corners, cv::Point2f center)
    {
    std::vector<cv::Point2f> top, bot;
            
    for (int i = 0; i < corners.size(); i++)
    {
    if (corners[i].y < center.y)
    top.push_back(corners[i]);
    else
    bot.push_back(corners[i]);
    }
            
    cv::Point2f tl = top[0].x > top[1].x ? top[1] : top[0];
    cv::Point2f tr = top[0].x > top[1].x ? top[0] : top[1];
    cv::Point2f bl = bot[0].x > bot[1].x ? bot[1] : bot[0];
    cv::Point2f br = bot[0].x > bot[1].x ? bot[0] : bot[1];
            
    corners.clear();
    corners.push_back(tl);
    corners.push_back(tr);
    corners.push_back(br);
    corners.push_back(bl);
    }
            
    ...
            
    // Get mass center
    cv::Point2f center(0,0);
    for (int i = 0; i < corners.size(); i++)
    center += corners[i];
            
    center *= (1. / corners.size());
    sortCorners(corners, center);
            
            
            
     */

    // p1 to p4 are in anticlockwise order, starting from the top left

    // double s=source.get(0).x;

    /////////////////
    /////////////////
    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Log.w("myApp", "bitmap width is " + width);
    Log.w("myApp", "bitmap height is " + height);

    Rect imageRect = new Rect(0, 0, width, height);

    // make the default size about 4/5 of the width or height

    /*
            
            int cropWidth = Math.min(width, height) * 4 / 5;
            int cropHeight = cropWidth;
            
            
            int x = (width - cropWidth) / 2;
            int y = (height - cropHeight) / 2;
            
            RectF cropRect = new RectF(x, y, x + cropWidth, y + cropHeight);
            
    */
    /// To test the points order

    /*
    Point p1 = new Point(1.0*x,1.0*y );
    Point p2 = new Point(1.0*x+150.0,1.0*y+1.0*cropHeight);
            
    Point p3 = new Point(1.0*x+1.0*cropWidth,1.0*y+1.0*cropHeight);
            
    Point p4 = new Point(1.0*x+1.0*cropWidth,1.0*y);
            
    ArrayList<Point> source = new ArrayList<Point>();
    source.add(p1);
    source.add(p2);
    source.add(p3);
    source.add(p4);
            
    */
    ////////////////////////////

    Log.w("myApp",
            "from inside makedeafult inside cropimage calss, default crop rect values are set and now highlight view will be initiated ");

    HighlightView hv = new HighlightView(mImageView, imageRect, sortedPoints);

    Log.w("myApp", "higlight view initiated; done");

    mImageView.add(hv);
    Log.w("myApp", "add hv is done; done");

    mImageView.invalidate();
    mCrop = hv;

    Log.w("myApp", "mcrop=hv donee");
    mCrop.setFocus(true);
}

From source file:news_analysis.NewsAnalysis.java

public static void main(String[] args) throws IOException {
    file = new File("F:\\AbcFile\\filename.txt");
    if (!file.exists()) {
        file.createNewFile();
    }
    fw = new FileWriter(file.getAbsoluteFile());
    bw = new BufferedWriter(fw);
    bw.flush();
    // Load an image file and display it in a window.
    Mat m1 = Highgui.imread("E:\\Raw Java Project\\Thesis\\test6.jpg");
    //imshow("Original", m1);

    // Do some image processing on the image and display in another window.
    Mat m2 = new Mat();
    Imgproc.bilateralFilter(m1, m2, -1, 50, 10);
    Imgproc.Canny(m2, m2, 10, 200);
    imshow("Edge Detected", m2);
    Size sizeA = m2.size();
    System.out.println("width: " + sizeA.width + " Height: " + sizeA.height);
    int width = (int) sizeA.width;
    int hight = (int) sizeA.height;
    int pointLength[][][] = new int[hight][width][2];
    for (int i = 0; i < hight; i++) {
        for (int j = 0; j < width; j++) {
            double[] data = m2.get(i, j);
            if (data[0] != 0) {
                pointLength[i][j][0] = 0;
                pointLength[i][j][1] = 0;
                continue;
            }
            if (j != 0 && m2.get(i, j - 1)[0] == 0) {
                pointLength[i][j][0] = pointLength[i][j - 1][0];
            } else {
                int count = 0;
                for (int k = j + 1; k < width; k++) {
                    if (m2.get(i, k)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][0] = count;
            }
            if (i != 0 && m2.get(i - 1, j)[0] == 0) {
                pointLength[i][j][1] = pointLength[i - 1][j][1];
            } else {
                int count = 0;
                for (int k = i + 1; k < hight; k++) {
                    if (m2.get(k, j)[0] == 0) {
                        count++;
                    } else {
                        break;
                    }
                }
                pointLength[i][j][1] = count;
            }

            //System.out.println(data[0]);
        }
    }
    String temp = "";
    Mat convertArea = m2.clone();

    int[][] balckWhite = new int[hight][width];

    for (int i = 0; i < hight; i++) {
        temp = "";
        for (int j = 0; j < width; j++) {
            if (i == 0 || j == 0 || i == hight - 1 || j == width - 1) {
                temp = temp + "@";
                balckWhite[i][j] = 1;

                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 150 && pointLength[i][j][1] > 6) {
                temp = temp + "@";
                balckWhite[i][j] = 1;

                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else if (pointLength[i][j][0] > 7 && pointLength[i][j][1] > 200) {
                temp = temp + "@";
                balckWhite[i][j] = 1;

                double[] data = m2.get(i, j);
                data[0] = 255.0;
                convertArea.put(i, j, data);
            } else {
                temp = temp + " ";
                balckWhite[i][j] = 0;

                double[] data = m2.get(i, j);
                data[0] = 0.0;
                convertArea.put(i, j, data);
            }

        }
        //filewrile(temp);
    }
    imshow("Convertion", convertArea);
    IsImage isImage = new IsImage();
    HeadLineDetection isHeadline = new HeadLineDetection();

    ImageBorderDetectionBFS imgBFS = new ImageBorderDetectionBFS();
    ArrayList<BorderItem> borderItems = imgBFS.getBorder(balckWhite, width, hight);
    Mat[] subMat = new Mat[borderItems.size()];
    for (int i = 0; i < borderItems.size(); i++) {
        subMat[i] = m2.submat(borderItems.get(i).getMinX(), borderItems.get(i).getMaxX(),
                borderItems.get(i).getMinY(), borderItems.get(i).getMaxY());
        if (isImage.isImage(subMat[i])) {
            System.out.println("subMat" + i + " is an image");
            //imshow("subMat" + i, subMat[i]);

        } else if (isHeadline.isHeadLine(subMat[i])) {
            System.out.println("subMat" + i + " is an Headline");
            //imshow("Headline" + i, subMat[i]);
        } else {
            System.out.println("subMat" + i + " is an Column");
            imshow("Column" + i, subMat[i]);
        }
        //imshow("subMat" + i, subMat[i]);
        bw.close();

    }

}

From source file:opencv.CaptchaDetection.java

/***
 * Finds candidate digit regions in the captcha image and returns them as ROIs.
 * @param src source image
 * @return list of ROI Mats, one per detected region
 */
private static List<Mat> find_number(Mat src) {
    Mat src_tmp = src.clone();

    // dilate to thicken the strokes
    Imgproc.dilate(src_tmp, src_tmp, new Mat());

    // blur, then detect edges with Canny
    Mat canny_edge = new Mat();
    Imgproc.blur(src_tmp, src_tmp, new Size(3, 3));
    Imgproc.Canny(src_tmp, canny_edge, 50, 150, 3, false);

    // find external contours on the edge image
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(canny_edge, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    List<Rect> boundRect = new ArrayList<>();

    // approximate each contour and keep bounding rects that are large enough
    for (int i = 0; i < contours.size(); i++) {
        MatOfPoint2f tmp_mp2f_1 = new MatOfPoint2f();
        MatOfPoint2f tmp_mp2f_2 = new MatOfPoint2f();

        contours.get(i).convertTo(tmp_mp2f_1, CvType.CV_32FC2);

        Imgproc.approxPolyDP(tmp_mp2f_1, tmp_mp2f_2, 3, true);

        tmp_mp2f_2.convertTo(contours.get(i), CvType.CV_32S);

        Rect rect = Imgproc.boundingRect(contours.get(i));

        //if (rect.area() > 300)
        //out.println("h : " + rect.height + ", w : " + rect.width + ", aera :  " + rect.area());

        if (rect.height >= 21 && rect.width >= 21 && rect.area() >= 700)
            boundRect.add(rect);
    }

    // draw the kept rectangles on the working copy (debug visualization)
    for (Rect rect : boundRect) {
        Scalar color = new Scalar(128);
        Imgproc.rectangle(src_tmp, rect.tl(), rect.br(), color, 2, 8, 0);
    }

    // sort the bounding rectangles
    Collections.sort(boundRect, rectSort);

    List<Mat> numRoi = new ArrayList<>();
    for (Rect rect : boundRect)
        numRoi.add(src.submat(rect));

    //for (Mat roi : numRoi) 
    //showResult(roi, "roi");

    return numRoi;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.detect.DetectStripTask.java

License:Open Source License

@Nullable
@Override
protected Void doInBackground(Intent... params) {
    Intent intent = params[0];

    if (intent == null) {
        return null;
    }

    String uuid = intent.getStringExtra(Constant.UUID);

    StripTest stripTest = new StripTest();
    int numPatches = stripTest.getPatchCount(uuid);

    format = intent.getIntExtra(Constant.FORMAT, ImageFormat.NV21);
    width = intent.getIntExtra(Constant.WIDTH, 0);
    height = intent.getIntExtra(Constant.HEIGHT, 0);

    if (width == 0 || height == 0) {
        return null;
    }

    JSONArray imagePatchArray = null;
    int imageCount = -1;
    Mat labImg; // Mat for image from NV21 data
    Mat labStrip; // Mat for detected strip

    try {
        String json = FileUtil.readFromInternalStorage(context, Constant.IMAGE_PATCH);
        imagePatchArray = new JSONArray(json);
    } catch (Exception e) {
        Timber.e(e);
    }

    for (int i = 0; i < numPatches; i++) {
        try {
            if (imagePatchArray != null) {
                // sub-array for each patch
                JSONArray array = imagePatchArray.getJSONArray(i);

                // get the image number from the json array
                int imageNo = array.getInt(0);

                if (imageNo > imageCount) {

                    // Set imageCount to current number
                    imageCount = imageNo;

                    byte[] data = FileUtil.readByteArray(context, Constant.DATA + imageNo);
                    if (data == null) {
                        throw new IOException();
                    }

                    //make a L,A,B Mat object from data
                    try {
                        labImg = makeLab(data);
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    //perspectiveTransform
                    try {
                        if (labImg != null) {
                            warp(labImg, imageNo);
                        }
                    } catch (Exception e) {
                        if (context != null) {
                            Timber.e(e);
                        }
                        continue;
                    }

                    //divide into calibration and strip areas
                    try {
                        if (context != null) {
                            divideIntoCalibrationAndStripArea();
                        }
                    } catch (Exception e) {
                        Timber.e(e);
                        continue;
                    }

                    //save warped image to external storage
                    //                        if (DEVELOP_MODE) {
                    //                        Mat rgb = new Mat();
                    //                        Imgproc.cvtColor(warpMat, rgb, Imgproc.COLOR_Lab2RGB);
                    //                        Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888);
                    //                        Utils.matToBitmap(rgb, bitmap);
                    //
                    //                        //if (FileUtil.isExternalStorageWritable()) {
                    //                        FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + ".png");
                    //}
                    //                            //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false);
                    //                        }

                    //calibrate
                    Mat calibrationMat;
                    try {
                        CalibrationResultData calResult = getCalibratedImage(warpMat);
                        if (calResult == null) {
                            return null;
                        } else {
                            calibrationMat = calResult.getCalibratedImage();
                        }

                        //                            Log.d(this.getClass().getSimpleName(), "E94 error mean: " + String.format(Locale.US, "%.2f", calResult.meanE94)
                        //                                    + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94)
                        //                                    + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94));

                        //                            if (AppPreferences.isDiagnosticMode()) {
                        //                                listener.showError("E94 mean: " + String.format(Locale.US, "%.2f", calResult.meanE94)
                        //                                        + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94)
                        //                                        + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94));
                        //                            }
                    } catch (Exception e) {
                        Timber.e(e);
                        return null;
                    }

                    //show calibrated image
                    //                        if (DEVELOP_MODE) {
                    //                            Mat rgb = new Mat();
                    //                            Imgproc.cvtColor(calibrationMat, rgb, Imgproc.COLOR_Lab2RGB);
                    //                            Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888);
                    //                            Utils.matToBitmap(rgb, bitmap);
                    //                            if (FileUtil.isExternalStorageWritable()) {
                    //                                FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + "_cal.png");
                    //                            }
                    //                            //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false);
                    //                        }

                    // cut out black area that contains the strip
                    Mat stripArea = null;
                    if (roiStripArea != null) {
                        stripArea = calibrationMat.submat(roiStripArea);
                    }

                    if (stripArea != null) {
                        Mat strip = null;
                        try {
                            StripTest.Brand brand = stripTest.getBrand(uuid);
                            strip = OpenCVUtil.detectStrip(stripArea, brand, ratioW, ratioH);
                        } catch (Exception e) {
                            Timber.e(e);
                        }

                        String error = "";
                        if (strip != null) {
                            labStrip = strip.clone();
                        } else {
                            if (context != null) {
                                Timber.e(context.getString(R.string.error_calibrating));
                            }
                            labStrip = stripArea.clone();

                            error = Constant.ERROR;

                            //draw a red cross over the image
                            Scalar red = RED_LAB_COLOR; // Lab color
                            Imgproc.line(labStrip, new Point(0, 0), new Point(labStrip.cols(), labStrip.rows()),
                                    red, 2);
                            Imgproc.line(labStrip, new Point(0, labStrip.rows()), new Point(labStrip.cols(), 0),
                                    red, 2);
                        }

                        try {
                            // create byte[] from Mat and store it in internal storage
                            // In order to restore the byte array, we also need the rows and columns dimensions
                            // these are stored in the last 8 bytes
                            int dataSize = labStrip.cols() * labStrip.rows() * 3;
                            byte[] payload = new byte[dataSize + 8];
                            byte[] matByteArray = new byte[dataSize];

                            labStrip.get(0, 0, matByteArray);

                            // pack cols and rows into byte arrays
                            byte[] rows = FileUtil.leIntToByteArray(labStrip.rows());
                            byte[] cols = FileUtil.leIntToByteArray(labStrip.cols());

                            // append them to the end of the array, in order rows, cols
                            System.arraycopy(matByteArray, 0, payload, 0, dataSize);
                            System.arraycopy(rows, 0, payload, dataSize, 4);
                            System.arraycopy(cols, 0, payload, dataSize + 4, 4);
                            FileUtil.writeByteArray(context, payload, Constant.STRIP + imageNo + error);
                        } catch (Exception e) {
                            Timber.e(e);
                        }
                    }
                }
            }
        } catch (@NonNull JSONException | IOException e) {

            if (context != null) {
                Timber.e(context.getString(R.string.error_cut_out_strip));
            }
        }
    }
    return null;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.OpenCVUtil.java

License:Open Source License

@SuppressWarnings("UnusedParameters")
public static Mat detectStrip(Mat stripArea, StripTest.Brand brand, double ratioW, double ratioH) {
    List<Mat> channels = new ArrayList<>();
    Mat sArea = stripArea.clone();

    // median blur to reduce noise
    Imgproc.medianBlur(sArea, sArea, 3);
    Core.split(sArea, channels);

    // create binary image
    Mat binary = new Mat();

    // threshold the first channel into a binary mask
    Imgproc.threshold(channels.get(0), binary, 128, MAX_RGB_INT_VALUE, Imgproc.THRESH_BINARY);

    // compute first approximation of line through length of the strip
    final WeightedObservedPoints points = new WeightedObservedPoints();
    final WeightedObservedPoints corrPoints = new WeightedObservedPoints();

    double tot, yTot;
    for (int i = 0; i < binary.cols(); i++) { // iterate over cols
        tot = 0;
        yTot = 0;
        for (int j = 0; j < binary.rows(); j++) { // iterate over rows
            if (binary.get(j, i)[0] > 128) {
                yTot += j;
                tot++;
            }
        }
        if (tot > 0) {
            points.add((double) i, yTot / tot);
        }
    }

    // order of coefficients is (b + ax), so [b, a]
    final PolynomialCurveFitter fitter = PolynomialCurveFitter.create(1);
    List<WeightedObservedPoint> pointsList = points.toList();
    final double[] coefficient = fitter.fit(pointsList);

    // second pass, remove outliers
    double estimate, actual;

    for (int i = 0; i < pointsList.size(); i++) {
        estimate = coefficient[1] * pointsList.get(i).getX() + coefficient[0];
        actual = pointsList.get(i).getY();
        if (actual > LOWER_PERCENTAGE_BOUND * estimate && actual < UPPER_PERCENTAGE_BOUND * estimate) {
            //if the point differs less than +/- 10%, keep the point
            corrPoints.add(pointsList.get(i).getX(), pointsList.get(i).getY());
        }
    }

    final double[] coefficientCorr = fitter.fit(corrPoints.toList());
    double slope = coefficientCorr[1];
    double offset = coefficientCorr[0];

    // compute rotation angle
    double rotAngleDeg = Math.atan(slope) * 180 / Math.PI;

    //determine a point on the line, in the middle of strip, in the horizontal middle of the whole image
    int midPointX = binary.cols() / 2;
    int midPointY = (int) Math.round(midPointX * slope + offset);

    // rotate around the midpoint, to straighten the binary strip
    Mat dstBinary = new Mat(binary.rows(), binary.cols(), binary.type());
    Point center = new Point(midPointX, midPointY);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, rotAngleDeg, 1.0);
    Imgproc.warpAffine(binary, dstBinary, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // also apply rotation to colored strip
    Mat dstStrip = new Mat(stripArea.rows(), stripArea.cols(), stripArea.type());
    Imgproc.warpAffine(stripArea, dstStrip, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // Compute white points in each row
    double[] rowCount = new double[dstBinary.rows()];
    int rowTot;
    for (int i = 0; i < dstBinary.rows(); i++) { // iterate over rows
        rowTot = 0;
        for (int j = 0; j < dstBinary.cols(); j++) { // iterate over cols
            if (dstBinary.get(i, j)[0] > 128) {
                rowTot++;
            }
        }
        rowCount[i] = rowTot;
    }

    // find width by finding rising and falling edges
    // rising edge  = largest positive difference
    // falling edge = largest negative difference
    int risePos = 0;
    int fallPos = 0;
    double riseVal = 0;
    double fallVal = 0;
    for (int i = 0; i < dstBinary.rows() - 1; i++) {
        if (rowCount[i + 1] - rowCount[i] > riseVal) {
            riseVal = rowCount[i + 1] - rowCount[i];
            risePos = i + 1;
        }
        if (rowCount[i + 1] - rowCount[i] < fallVal) {
            fallVal = rowCount[i + 1] - rowCount[i];
            fallPos = i;
        }
    }

    // cut out binary strip
    Point stripTopLeft = new Point(0, risePos);
    Point stripBottomRight = new Point(dstBinary.cols(), fallPos);

    org.opencv.core.Rect stripAreaRect = new org.opencv.core.Rect(stripTopLeft, stripBottomRight);
    Mat binaryStrip = dstBinary.submat(stripAreaRect);

    // also cut out colored strip
    Mat colorStrip = dstStrip.submat(stripAreaRect);

    // now right end of strip
    // method: first rising edge

    double[] colCount = new double[binaryStrip.cols()];
    int colTotal;
    for (int i = 0; i < binaryStrip.cols(); i++) { // iterate over cols
        colTotal = 0;
        for (int j = 0; j < binaryStrip.rows(); j++) { // iterate over rows
            if (binaryStrip.get(j, i)[0] > 128) {
                colTotal++;
            }
        }

        //Log.d("Caddisfly", String.valueOf(colTotal));
        colCount[i] = colTotal;
    }

    stripAreaRect = getStripRectangle(binaryStrip, colCount, brand.getStripLength(), ratioW);

    Mat resultStrip = colorStrip.submat(stripAreaRect).clone();

    // release Mat objects
    stripArea.release();
    sArea.release();
    binary.release();
    dstBinary.release();
    dstStrip.release();
    binaryStrip.release();
    colorStrip.release();

    return resultStrip;
}