Example usage for org.opencv.core Mat release

Introduction

On this page you can find usage examples for org.opencv.core Mat release, collected from open-source projects; they show how real code frees the native memory held by Mat objects.

Prototype

public void release() 
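
Calling release() decrements the reference count on the Mat's native data and frees the memory once no other Mat shares it; the Java wrapper object itself is left empty and is reclaimed later by the garbage collector. A minimal sketch of the typical lifecycle, assuming the OpenCV native library has already been loaded:

// Minimal sketch: create, use, and release a Mat.
Mat image = Imgcodecs.imread("input.jpg"); // allocates native memory
try {
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY);
    // ... further processing ...
} finally {
    image.release(); // frees the native buffer as soon as the Mat is no longer needed
}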

Usage

From source file:fi.conf.tabare.ARDataProvider.java

private void detect() {

    //Mat composite_image;
    Mat input_image = new Mat();
    Mat undistorted_image = new Mat();
    Mat circles = new Mat();
    MatOfKeyPoint mokp = new MatOfKeyPoint();
    Mat cameraMatrix = null;

    //List<Mat> channels = new LinkedList<>();

    //Loop
    while (running) {
        try {
            if (inputVideo.read(input_image)) {
                Mat preview_image = null;

                if (selectedView == View.calib)
                    preview_image = input_image.clone();

                //Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_RGB2HSV);
                //Core.split(input_image, channels);

                Imgproc.cvtColor(input_image, input_image, Imgproc.COLOR_BGR2GRAY);

                //Imgproc.equalizeHist(input_image, input_image);

                input_image.convertTo(input_image, -1, params.contrast, params.brightness); //image*contrast[1.0-3.0] + brightness[0-255]

                doBlur(input_image, input_image, params.blur, params.blurAmount);

                if (selectedView == View.raw)
                    preview_image = input_image.clone();

                if (params.enableDistortion) {

                    if (cameraMatrix == null)
                        cameraMatrix = Imgproc.getDefaultNewCameraMatrix(Mat.eye(3, 3, CvType.CV_64F),
                                new Size(input_image.width(), input_image.height()), true);

                    Imgproc.warpAffine(input_image, input_image, shiftMat, frameSize);

                    if (undistorted_image.empty()) // the Mat is constructed above, so a null check would never fire
                        undistorted_image = new Mat((int) frameSize.width * 2, (int) frameSize.height * 2,
                                CvType.CV_64F);

                    Imgproc.undistort(input_image, undistorted_image, cameraMatrix, distCoeffs);

                    input_image = undistorted_image.clone();

                    if (selectedView == View.dist)
                        preview_image = input_image.clone();

                }

                //               if(background == null) background = input_image.clone();         
                //               if(recaptureBg){
                //                  backgSubstractor.apply(background, background);
                //                  System.out.println(background.channels() + " " + background.size() );
                //                  System.out.println(input_image.channels() + " " + input_image.size() );
                //                  recaptureBg = false;
                //               }
                //               if(dynamicBGRemoval){
                //                  //Imgproc.accumulateWeighted(input_image, background, dynamicBGAmount);
                //                  //Imgproc.accumulateWeighted(input_image, background, 1.0f);
                //                  //Core.subtract(input_image, background, input_image);
                //                  //Core.bitwise_xor(input_image, background, input_image);
                //
                //                  doBlur(input_image, background, Blur.normal_7x7, 0); //Blur a little, to get nicer result when substracting
                //                  backgSubstractor.apply(background, background, dynamicBGAmount);
                //               }
                //               if(background != null) Core.add(input_image, background, input_image);

                if (params.blobTracking) {
                    Mat blobs_image = input_image.clone();

                    Imgproc.threshold(blobs_image, blobs_image, params.blobThreshold, 254,
                            (params.blobThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY));

                    Size kernelSize = null;

                    switch (params.blobMorpthKernelSize) {
                    case size_3x3:
                        kernelSize = new Size(3, 3);
                        break;
                    case size_5x5:
                        kernelSize = new Size(5, 5);
                        break;
                    case size_7x7:
                        kernelSize = new Size(7, 7);
                        break;
                    case size_9x9:
                        kernelSize = new Size(9, 9);
                        break;
                    }

                    int kernelType = -1;

                    switch (params.blobMorphKernelShape) {
                    case ellipse:
                        kernelType = Imgproc.MORPH_ELLIPSE;
                        break;
                    case rect:
                        kernelType = Imgproc.MORPH_RECT;
                        break;
                    default:
                        break;
                    }

                    switch (params.blobMorphOps) {
                    case dilate:
                        Imgproc.dilate(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    case erode:
                        Imgproc.erode(blobs_image, blobs_image,
                                Imgproc.getStructuringElement(kernelType, kernelSize));
                        break;
                    default:
                        break;
                    }

                    if (blobFeatureDetector == null)
                        blobFeatureDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);

                    blobFeatureDetector.detect(blobs_image, mokp);
                    blobData.add(mokp);

                    if (selectedView == View.blob)
                        preview_image = blobs_image.clone();

                    blobs_image.release();
                }

                if (params.tripTracking) {

                    Mat trips_image = undistorted_image.clone();

                    if (params.tripEnableThresholding)
                        if (params.tripAdaptThreshold) {
                            Imgproc.adaptiveThreshold(trips_image, trips_image, 255,
                                    Imgproc.ADAPTIVE_THRESH_MEAN_C, // adaptive method comes before threshold type
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV : Imgproc.THRESH_BINARY),
                                    5, params.tripThreshold * 0.256f);
                        } else {
                            Imgproc.threshold(trips_image, trips_image, params.tripThreshold, 255,
                                    (params.tripThInverted ? Imgproc.THRESH_BINARY_INV
                                            : Imgproc.THRESH_BINARY));
                        }

                    switch (params.tripMorphOps) {
                    case dilate:
                        Imgproc.dilate(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    case erode:
                        Imgproc.erode(trips_image, trips_image,
                                Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3)));
                        break;
                    default:
                        break;
                    }

                    //Imgproc.HoughCircles(tres, circ, Imgproc.CV_HOUGH_GRADIENT, 1, tres.height()/8, 80, 1+p.par4, p.par5, p.par6);
                    Imgproc.HoughCircles(trips_image, circles, Imgproc.CV_HOUGH_GRADIENT, params.tripDP,
                            params.tripCenterDist, params.tripCannyThresh, params.tripAccumThresh,
                            params.tripRadMin, params.tripRadMax);

                    for (int i = 0; i < circles.cols(); i++) {

                        double[] coords = circles.get(0, i);

                        if (coords == null || coords[0] <= 1 || coords[1] <= 1)
                            continue; //If the circle is off the limits, or too small, don't process it.

                        TripcodeCandidateSample tc = new TripcodeCandidateSample(undistorted_image, coords);

                        if (tc.isValid())
                            tripcodeData.add(tc);

                    }

                    if (selectedView == View.trip)
                        preview_image = trips_image.clone();
                    trips_image.release();

                }

                if (preview_image != null) {
                    camPreviewPanel.updatePreviewImage(preview_image);
                    preview_image.release();
                }

            } else {
                System.out.println("frame/cam failiure!");
            }

        } catch (Exception e) {
            e.printStackTrace();
            running = false;
        }

        //FPS calculations
        if (camPreviewPanel != null) {
            long t = System.currentTimeMillis();
            detectTime = (t - lastFrameDetectTime);
            lastFrameDetectTime = t;
            camPreviewPanel.updateDetectTime(detectTime);
        }

    }

    //De-init
    circles.release();
    undistorted_image.release();
    input_image.release();
    inputVideo.release();
    shiftMat.release();
}
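
Every Mat created in this method, plus the VideoCapture, is released in the de-init block once the loop exits; an exception merely clears the running flag, so control still reaches the releases. A more defensive variant would make that guarantee explicit with try/finally; a hedged sketch of the pattern, with the loop body elided:

Mat input_image = new Mat();
try {
    while (running) {
        // ... per-frame work ...
    }
} finally {
    input_image.release(); // runs even if the loop throws
    inputVideo.release();
}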

From source file:hotgoaldetection.Webcam.java

public static ImagePanel createPanel(final Mat camera, String frameName) throws IOException {
    JFrame frame = new JFrame();
    frame.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent e) {
            camera.release();
            System.exit(0);
        }
    });
    ImagePanel panel = new ImagePanel();
    panel.setPreferredSize(new Dimension(WIDTH, HEIGHT));
    frame.getContentPane().add(panel);
    frame.pack();
    frame.setLocationByPlatform(true);
    frame.setName(frameName);

    //frame.getOwner().setName(frameName);
    frame.setVisible(true);
    return panel;
}

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Does the main loop; if we reach the penultimate frame,
 * it means we have reached the end of the video.
 */
public void processVideo() {
    do {
        Mat tmp = new Mat();
        video.read(tmp);
        if (!tmp.empty()) {
            frame = tmp.clone();
            tmp.release();
            if (frameCounter < (getFrameCount() / 2) - 1) {
                frameCounter++;
                if (getMinutes() > 0) {
                    carsPerMinute = getDetectedCarsCount() / getMinutes();
                }

                processFrame(getFrame());
            } else {
                frameCounter = 0;
                finished = true;

                logger.trace("Restarting..");
                setFramePos(1);
            }
        } else {
            logger.warn("Empty image!");
            frameCounter = 0;
            finished = true;

            logger.trace("Restarting..");
            setFramePos(1);
        }
    } while (frameCounter > (getFrameCount() / 2) - 2);
}
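
The loop clones each grabbed frame into the frame field and immediately releases the temporary. Assuming frame is a long-lived Mat field, reading straight into it would save one allocation and one copy per frame; a hedged alternative sketch:

// Sketch: read directly into the long-lived frame field.
if (video.read(frame) && !frame.empty()) {
    processFrame(getFrame());
}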

From source file:hu.unideb.fksz.VideoProcessor.java

License:Open Source License

/**
 * Returns an {@code Image}, converted from a {@code Mat}.
 *
 * @param frameToConvert   the frame to be converted to an {@code Image}
 * @return   the {@code Image}, converted from a {@code Mat}
 */
public Image convertCvMatToImage(Mat frameToConvert) {
    if (!buffer.empty()) {
        buffer.release();
    }
    try {
        Imgproc.resize(frameToConvert, frameToConvert, frameSize);
        Imgcodecs.imencode(".jpg", frameToConvert, buffer, params);
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
    fxImage = new Image(new ByteArrayInputStream(buffer.toArray()));
    if (!frameToConvert.empty()) {
        frameToConvert.release(); // the method takes ownership of the input frame
    }

    return fxImage;
}
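
Because convertCvMatToImage() resizes, encodes, and finally releases frameToConvert, the caller loses the frame after the call. When the frame is still needed afterwards, passing a clone keeps the original alive; a hedged usage sketch (videoProcessor and currentFrame are illustrative names):

// Sketch: keep currentFrame usable after conversion by passing a clone.
Image fxImage = videoProcessor.convertCvMatToImage(currentFrame.clone());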

From source file:in.fabinpaul.sixthsense.ColorBlobDetectionFragment.java

License:Apache License

@Override
public boolean onTouch(View v, MotionEvent event) {
    if (event.getAction() == MotionEvent.ACTION_DOWN) {
        count++;
        if (count > 3)
            count = 0;

        if (count == 3) {
            colorMarkerSet = true;
            comm.saveButtonVisibility();
        }

        int cols = mRgba.cols();
        int rows = mRgba.rows();

        int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
        int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
        Log.i(TAG, "x coordinates" + event.getX() + "y coordinates" + event.getY());
        Log.i(TAG, "View width" + mOpenCvCameraView.getWidth() + "View Height" + mOpenCvCameraView.getHeight());

        int x = (int) event.getX() - xOffset;
        int y = (int) event.getY() - yOffset;

        if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
            return false;

        Rect touchedRect = new Rect();

        touchedRect.x = (x > 4) ? x - 4 : 0;
        touchedRect.y = (y > 4) ? y - 4 : 0;

        touchedRect.width = (x + 4 < cols) ? x - 1 - touchedRect.x : cols - touchedRect.x;
        touchedRect.height = (y + 4 < rows) ? y - 1 - touchedRect.y : rows - touchedRect.y;

        Log.i(TAG, "Width" + touchedRect.width + " Height" + touchedRect.height);
        Log.i(TAG, "Column" + cols + " Rows" + rows);
        Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");

        Mat touchedRegionRgba = mRgba.submat(touchedRect);

        Mat touchedRegionHsv = new Mat();
        Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

        // Calculate average color of touched region
        mBlobColorHsv[count] = Core.sumElems(touchedRegionHsv);
        int pointCount = touchedRect.width * touchedRect.height;
        for (int i = 0; i < mBlobColorHsv[count].val.length; i++)
            mBlobColorHsv[count].val[i] /= pointCount;

        mBlobColorRgba[count] = converScalarHsv2Rgba(mBlobColorHsv[count]);

        Log.i(TAG, "Before" + mBlobColorHsv[count].val[0] + " " + mBlobColorHsv[count].val[1] + " "
                + mBlobColorHsv[count].val[2]);
        Log.i(TAG, "After" + mBlobColorRgba[count].val[0] + " " + mBlobColorRgba[count].val[1] + " "
                + mBlobColorRgba[count].val[2]);

        Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba[count].val[0] + ", " + mBlobColorRgba[count].val[1]
                + ", " + mBlobColorRgba[count].val[2] + ", " + mBlobColorRgba[count].val[3] + ")");

        // mDetector[count].setHsvColor(mBlobColorHsv[count]);
        setHSV(count);

        mIsColorSelected[count] = true;

        touchedRegionRgba.release();
        touchedRegionHsv.release();
    }
    return true; // don't need subsequent touch events
}

From source file:interactivespaces.service.image.vision.opencv.swing.OpenCvMatPanel.java

License:Apache License

/**
 * Draw an OpenCV image in the panel.
 *
 * @param opencvImage
 *          the image to draw
 */
public void drawImage(final Mat opencvImage) {
    SwingWorker<BufferedImage, Void> worker = new SwingWorker<BufferedImage, Void>() {

        @Override
        protected BufferedImage doInBackground() throws Exception {
            return MatUtils.matToBufferedImage(opencvImage);
        }

        @Override
        protected void done() {
            try {
                image = get();

                repaint();

                if (releaseAfterDraw) {
                    opencvImage.release();
                }
            } catch (InterruptedException e) {
                log.info("Swing worker for rendering Mat images interrupted");
            } catch (ExecutionException e) {
                log.error("Error during Swing worker for rendering Mat images", e);
            }
        }
    };

    worker.execute();
}
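
When releaseAfterDraw is set, the Mat is released on the event dispatch thread after the SwingWorker has produced the BufferedImage, so the caller effectively transfers ownership to the panel and must not reuse the Mat afterwards. A hedged usage sketch (panel and frame are illustrative names):

// Sketch: hand the frame off; allocate a fresh Mat for the next capture.
panel.drawImage(frame);
frame = new Mat();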

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.camera.CameraCallbackBase.java

License:Open Source License

private double detectShadows(FinderPatternInfo info, Mat mat) {
    double shadowPercentage = NO_SHADOW_DATA;

    if (mat == null) {
        return shadowPercentage;
    }

    //cap the linked list at MAX_LIST_COUNT items; meant to stabilise the view and keep it from flickering.
    if (shadowTrack.size() > MAX_LIST_COUNT) {
        shadowTrack.removeFirst();
    }

    if (info != null) {
        double[] tl = new double[] { info.getTopLeft().getX(), info.getTopLeft().getY() };
        double[] tr = new double[] { info.getTopRight().getX(), info.getTopRight().getY() };
        double[] bl = new double[] { info.getBottomLeft().getX(), info.getBottomLeft().getY() };
        double[] br = new double[] { info.getBottomRight().getX(), info.getBottomRight().getY() };
        mat = OpenCVUtil.perspectiveTransform(tl, tr, bl, br, mat).clone();

        try {
            if (calibrationData != null) {
                shadowPercentage = PreviewUtil.getShadowPercentage(mat, calibrationData);
                shadowTrack.add(shadowPercentage);
            }
        } catch (Exception e) {
            Timber.e(e);
        } finally {
            if (mat != null) {
                mat.release();
            }
        }
    }

    return shadowPercentage;
}

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.OpenCVUtil.java

License:Open Source License

@SuppressWarnings("UnusedParameters")
public static Mat detectStrip(Mat stripArea, StripTest.Brand brand, double ratioW, double ratioH) {
    List<Mat> channels = new ArrayList<>();
    Mat sArea = stripArea.clone();

    // median blur (3x3) to suppress noise
    Imgproc.medianBlur(sArea, sArea, 3);
    Core.split(sArea, channels);

    // create binary image
    Mat binary = new Mat();

    // fixed threshold at 128 (min/max determination not used)
    Imgproc.threshold(channels.get(0), binary, 128, MAX_RGB_INT_VALUE, Imgproc.THRESH_BINARY);

    // compute first approximation of line through length of the strip
    final WeightedObservedPoints points = new WeightedObservedPoints();
    final WeightedObservedPoints corrPoints = new WeightedObservedPoints();

    double tot, yTot;
    for (int i = 0; i < binary.cols(); i++) { // iterate over cols
        tot = 0;
        yTot = 0;
        for (int j = 0; j < binary.rows(); j++) { // iterate over rows
            if (binary.get(j, i)[0] > 128) {
                yTot += j;
                tot++;
            }
        }
        if (tot > 0) {
            points.add((double) i, yTot / tot);
        }
    }

    // order of coefficients is (b + ax), so [b, a]
    final PolynomialCurveFitter fitter = PolynomialCurveFitter.create(1);
    List<WeightedObservedPoint> pointsList = points.toList();
    final double[] coefficient = fitter.fit(pointsList);

    // second pass, remove outliers
    double estimate, actual;

    for (int i = 0; i < pointsList.size(); i++) {
        estimate = coefficient[1] * pointsList.get(i).getX() + coefficient[0];
        actual = pointsList.get(i).getY();
        if (actual > LOWER_PERCENTAGE_BOUND * estimate && actual < UPPER_PERCENTAGE_BOUND * estimate) {
            //if the point differs less than +/- 10%, keep the point
            corrPoints.add(pointsList.get(i).getX(), pointsList.get(i).getY());
        }
    }

    final double[] coefficientCorr = fitter.fit(corrPoints.toList());
    double slope = coefficientCorr[1];
    double offset = coefficientCorr[0];

    // compute rotation angle
    double rotAngleDeg = Math.atan(slope) * 180 / Math.PI;

    //determine a point on the line, in the middle of strip, in the horizontal middle of the whole image
    int midPointX = binary.cols() / 2;
    int midPointY = (int) Math.round(midPointX * slope + offset);

    // rotate around the midpoint, to straighten the binary strip
    Mat dstBinary = new Mat(binary.rows(), binary.cols(), binary.type());
    Point center = new Point(midPointX, midPointY);
    Mat rotMat = Imgproc.getRotationMatrix2D(center, rotAngleDeg, 1.0);
    Imgproc.warpAffine(binary, dstBinary, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // also apply rotation to colored strip
    Mat dstStrip = new Mat(stripArea.rows(), stripArea.cols(), stripArea.type());
    Imgproc.warpAffine(stripArea, dstStrip, rotMat, binary.size(),
            Imgproc.INTER_CUBIC + Imgproc.WARP_FILL_OUTLIERS);

    // Compute white points in each row
    double[] rowCount = new double[dstBinary.rows()];
    int rowTot;
    for (int i = 0; i < dstBinary.rows(); i++) { // iterate over rows
        rowTot = 0;
        for (int j = 0; j < dstBinary.cols(); j++) { // iterate over cols
            if (dstBinary.get(i, j)[0] > 128) {
                rowTot++;
            }
        }
        rowCount[i] = rowTot;
    }

    // find width by finding rising and dropping edges
    // rising edge  = largest positive difference
    // falling edge = largest negative difference
    int risePos = 0;
    int fallPos = 0;
    double riseVal = 0;
    double fallVal = 0;
    for (int i = 0; i < dstBinary.rows() - 1; i++) {
        if (rowCount[i + 1] - rowCount[i] > riseVal) {
            riseVal = rowCount[i + 1] - rowCount[i];
            risePos = i + 1;
        }
        if (rowCount[i + 1] - rowCount[i] < fallVal) {
            fallVal = rowCount[i + 1] - rowCount[i];
            fallPos = i;
        }
    }

    // cut out binary strip
    Point stripTopLeft = new Point(0, risePos);
    Point stripBottomRight = new Point(dstBinary.cols(), fallPos);

    org.opencv.core.Rect stripAreaRect = new org.opencv.core.Rect(stripTopLeft, stripBottomRight);
    Mat binaryStrip = dstBinary.submat(stripAreaRect);

    // also cut out colored strip
    Mat colorStrip = dstStrip.submat(stripAreaRect);

    // now right end of strip
    // method: first rising edge

    double[] colCount = new double[binaryStrip.cols()];
    int colTotal;
    for (int i = 0; i < binaryStrip.cols(); i++) { // iterate over cols
        colTotal = 0;
        for (int j = 0; j < binaryStrip.rows(); j++) { // iterate over rows
            if (binaryStrip.get(j, i)[0] > 128) {
                colTotal++;
            }
        }

        //Log.d("Caddisfly", String.valueOf(colTotal));
        colCount[i] = colTotal;
    }

    stripAreaRect = getStripRectangle(binaryStrip, colCount, brand.getStripLength(), ratioW);

    Mat resultStrip = colorStrip.submat(stripAreaRect).clone();

    // release Mat objects
    stripArea.release();
    sArea.release();
    binary.release();
    dstBinary.release();
    dstStrip.release();
    binaryStrip.release();
    colorStrip.release();

    return resultStrip;
}
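
The method ends by releasing seven Mat objects one by one, including the stripArea argument it consumed. With this many temporaries, a small varargs helper keeps the cleanup in one place; a hedged sketch (releaseAll is a hypothetical utility, not part of this project):

// Hypothetical helper: release any number of Mats, tolerating nulls.
static void releaseAll(Mat... mats) {
    for (Mat m : mats) {
        if (m != null) {
            m.release();
        }
    }
}

With it, the cleanup block above would collapse to releaseAll(stripArea, sArea, binary, dstBinary, dstStrip, binaryStrip, colorStrip).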

From source file:org.akvo.caddisfly.sensor.colorimetry.strip.util.PreviewUtil.java

License:Open Source License

public static double getShadowPercentage(Mat bgr, @NonNull CalibrationData data) {

    double sumLum = 0;
    int countDev = 0;
    int countMaxDev = 0;
    double deviation;

    Mat lab = new Mat();
    Imgproc.cvtColor(bgr, lab, Imgproc.COLOR_BGR2Lab);

    double[][] points = CalibrationCard.createWhitePointArray(lab, data);

    //get the sum total of luminosity values
    for (double[] point : points) {
        sumLum += point[2];
    }

    double avgLum = sumLum / points.length;
    double avgLumReciprocal = 1.0 / avgLum;

    for (double[] point : points) {
        double lum = point[2];
        deviation = Math.abs(lum - avgLum) * avgLumReciprocal;

        // count number of points that differ more than CONTRAST_DEVIATION_FRACTION from the average
        if (deviation > Constant.CONTRAST_DEVIATION_FRACTION) {
            countDev++;
        }

        // count number of points that differ more than CONTRAST_MAX_DEVIATION_FRACTION from the average
        if (deviation > Constant.CONTRAST_MAX_DEVIATION_FRACTION) {
            countMaxDev++;
        }
    }

    // countMaxDev points are already counted once in countDev; the formula below
    // additionally weights them by LUMINOSITY_WEIGHT, so far-off points count much more heavily.
    // The result is capped at 100%.
    double result = Math.min(countDev + LUMINOSITY_WEIGHT * countMaxDev, points.length);

    lab.release();
    return (result / points.length) * 100.0;
}

From source file:org.ar.rubik.ImageRecognizer.java

License:Open Source License

/**
 * On Camera Frame
 * 
 * Process frame image through Rubik Face recognition possibly resulting in a state change.
 * 
 *  (non-Javadoc)
 * @see org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2#onCameraFrame(org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame)
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    //      Log.e(Constants.TAG, "CV Thread ID = " + Thread.currentThread().getId());

    // Just display error message if it is non-null.
    if (errorImage != null)
        return errorImage;

    Mat image = inputFrame.rgba();
    Size imageSize = image.size();
    Log.v(Constants.TAG_CAL, "Input Frame width=" + imageSize.width + " height=" + imageSize.height);
    if (imageSize.width != stateModel.openCVSize.width || imageSize.height != stateModel.openCVSize.height)
        Log.e(Constants.TAG_CAL, "State Model openCVSize does not agree with input frame!");

    // Save or Recall image as requested
    switch (MenuAndParams.imageSourceMode) {
    case NORMAL:
        break;
    case SAVE_NEXT:
        Util.saveImage(image);
        MenuAndParams.imageSourceMode = ImageSourceModeEnum.NORMAL;
        break;
    case PLAYBACK:
        image = Util.recallImage();
    default:
        break;
    }

    // Calculate and display Frames Per Second
    long newTimeStamp = System.currentTimeMillis();
    if (framesPerSecondTimeStamp > 0) {
        long frameTime = newTimeStamp - framesPerSecondTimeStamp;
        double framesPerSecond = 1000.0 / frameTime;
        String string = String.format("%4.1f FPS", framesPerSecond);
        Core.putText(image, string, new Point(50, 700), Constants.FontFace, 2, ColorTileEnum.WHITE.cvColor, 2);
    }
    framesPerSecondTimeStamp = newTimeStamp;

    try {

        // Initialize
        RubikFace rubikFace = new RubikFace();
        rubikFace.profiler.markTime(Profiler.Event.START);
        Log.i(Constants.TAG, "============================================================================");

        /* **********************************************************************
         * **********************************************************************
         * Return Original Image
         */
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.DIRECT) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            return annotation.drawAnnotation(image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Process to Grey Scale
         * 
         * This algorithm highlights areas that are all of nearly
         * the same hue.  In particular, cube faces should be highlighted.
         */
        Mat greyscale_image = new Mat();
        Imgproc.cvtColor(image, greyscale_image, Imgproc.COLOR_BGR2GRAY);
        rubikFace.profiler.markTime(Profiler.Event.GREYSCALE);
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.GREYSCALE) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            image.release();
            return annotation.drawAnnotation(greyscale_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Gaussian Filter Blur prevents getting a lot of false hits 
         */
        Mat blur_image = new Mat();

        int kernelSize = (int) MenuAndParams.gaussianBlurKernelSizeParam.value;
        kernelSize = kernelSize % 2 == 0 ? kernelSize + 1 : kernelSize; // make odd
        Imgproc.GaussianBlur(greyscale_image, blur_image, new Size(kernelSize, kernelSize), -1, -1);
        rubikFace.profiler.markTime(Profiler.Event.GAUSSIAN);
        greyscale_image.release();
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.GAUSSIAN) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            image.release();
            return annotation.drawAnnotation(blur_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Canny Edge Detection
         */
        Mat canny_image = new Mat();
        Imgproc.Canny(blur_image, canny_image, MenuAndParams.cannyLowerThresholdParam.value,
                MenuAndParams.cannyUpperThresholdParam.value, 3, // Sobel Aperture size.  This seems to be typically value used in the literature: i.e., a 3x3 Sobel Matrix.
                false); // use cheap gradient calculation: norm =|dI/dx|+|dI/dy|
        rubikFace.profiler.markTime(Profiler.Event.EDGE);
        blur_image.release();
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.CANNY) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            image.release();
            return annotation.drawAnnotation(canny_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Dilation Image Process
         */
        Mat dilate_image = new Mat();
        Imgproc.dilate(canny_image, dilate_image, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(
                MenuAndParams.dilationKernelSizeParam.value, MenuAndParams.dilationKernelSizeParam.value)));
        rubikFace.profiler.markTime(Profiler.Event.DILATION);
        canny_image.release();
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.DILATION) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            image.release();
            return annotation.drawAnnotation(dilate_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Contour Generation 
         */
        List<MatOfPoint> contours = new LinkedList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(dilate_image, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE); // Note: tried other TC89 options, but no significant change or improvement on cpu time.
        rubikFace.profiler.markTime(Profiler.Event.CONTOUR);
        dilate_image.release();

        // Create gray scale image but in RGB format, and then added yellow colored contours on top.
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.CONTOUR) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            Mat gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Mat rgba_gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Imgproc.cvtColor(image, gray_image, Imgproc.COLOR_RGB2GRAY);
            Imgproc.cvtColor(gray_image, rgba_gray_image, Imgproc.COLOR_GRAY2BGRA, 4); // four output channels, matching the BGRA destination
            Imgproc.drawContours(rgba_gray_image, contours, -1, ColorTileEnum.YELLOW.cvColor, 3);
            Core.putText(rgba_gray_image, "Num Contours: " + contours.size(), new Point(500, 50),
                    Constants.FontFace, 4, ColorTileEnum.RED.cvColor, 4);
            gray_image.release();
            image.release();
            return annotation.drawAnnotation(rgba_gray_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Polygon Detection
         */
        List<Rhombus> polygonList = new LinkedList<Rhombus>();
        for (MatOfPoint contour : contours) {

            // Keep only counter clockwise contours.  A clockwise contour is reported as a negative number.
            double contourArea = Imgproc.contourArea(contour, true);
            if (contourArea < 0.0)
                continue;

            // Keep only reasonable area contours
            if (contourArea < MenuAndParams.minimumContourAreaParam.value)
                continue;

            // Float, instead of double, is required by the approximate polygon detection algorithm.
            MatOfPoint2f contour2f = new MatOfPoint2f();
            MatOfPoint2f polygon2f = new MatOfPoint2f();
            MatOfPoint polygon = new MatOfPoint();

            // Make a polygon out of a contour with the provided epsilon accuracy parameter.
            // It uses the Douglas-Peucker algorithm http://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm
            contour.convertTo(contour2f, CvType.CV_32FC2);
            Imgproc.approxPolyDP(contour2f, polygon2f, MenuAndParams.polygonEpsilonParam.value, // The maximum distance between the original curve and its approximation.
                    true); // Resulting polygon representation is "closed": its first and last vertices are connected.
            polygon2f.convertTo(polygon, CvType.CV_32S);

            polygonList.add(new Rhombus(polygon));
        }

        rubikFace.profiler.markTime(Profiler.Event.POLYGON);

        // Create gray scale image but in RGB format, and then add yellow colored polygons on top.
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.POLYGON) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            Mat gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Mat rgba_gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Imgproc.cvtColor(image, gray_image, Imgproc.COLOR_RGB2GRAY);
            Imgproc.cvtColor(gray_image, rgba_gray_image, Imgproc.COLOR_GRAY2BGRA, 4);
            for (Rhombus polygon : polygonList)
                polygon.draw(rgba_gray_image, ColorTileEnum.YELLOW.cvColor);
            Core.putText(rgba_gray_image, "Num Polygons: " + polygonList.size(), new Point(500, 50),
                    Constants.FontFace, 3, ColorTileEnum.RED.cvColor, 4);
            return annotation.drawAnnotation(rgba_gray_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Rhombus Tile Recognition
         * 
         * From polygon list, produces a list of suitable Parallelograms (Rhombi).
         */
        Log.i(Constants.TAG, String.format("Rhombus:   X    Y   Area   a-a  b-a a-l b-l gamma"));
        List<Rhombus> rhombusList = new LinkedList<Rhombus>();
        // Get only valid Rhombus(es) : actually parallelograms.
        for (Rhombus rhombus : polygonList) {
            rhombus.qualify();
            if (rhombus.status == Rhombus.StatusEnum.VALID)
                rhombusList.add(rhombus);
        }

        // Filtering w.r.t. Rhombus set characteristics
        Rhombus.removedOutlierRhombi(rhombusList);

        rubikFace.profiler.markTime(Profiler.Event.RHOMBUS);

        // Create gray scale image but in RGB format, and then add yellow colored Rhombi (parallelograms) on top.
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.RHOMBUS) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            Mat gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Mat rgba_gray_image = new Mat(imageSize, CvType.CV_8UC4);
            Imgproc.cvtColor(image, gray_image, Imgproc.COLOR_RGB2GRAY);
            Imgproc.cvtColor(gray_image, rgba_gray_image, Imgproc.COLOR_GRAY2BGRA, 4);
            for (Rhombus rhombus : rhombusList)
                rhombus.draw(rgba_gray_image, ColorTileEnum.YELLOW.cvColor);
            Core.putText(rgba_gray_image, "Num Rhombus: " + rhombusList.size(), new Point(500, 50),
                    Constants.FontFace, 4, ColorTileEnum.RED.cvColor, 4);
            gray_image.release();
            image.release();
            return annotation.drawAnnotation(rgba_gray_image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Face Recognition
         * 
         * Takes a collection of Rhombus objects and determines if a valid
         * Rubik Face can be determined from them, and then also determines 
         * initial color for all nine tiles. 
         */
        rubikFace.processRhombuses(rhombusList, image);
        Log.i(Constants.TAG, "Face Solution = " + rubikFace.faceRecognitionStatus);
        rubikFace.profiler.markTime(Profiler.Event.FACE);
        if (MenuAndParams.imageProcessMode == ImageProcessModeEnum.FACE_DETECT) {
            stateModel.activeRubikFace = rubikFace;
            rubikFace.profiler.markTime(Profiler.Event.TOTAL);
            return annotation.drawAnnotation(image);
        }

        /* **********************************************************************
         * **********************************************************************
         * Cube Pose Estimation
         * 
         * Reconstruct the Rubik Cube 3D location and orientation in GL space coordinates.
         */
        if (rubikFace.faceRecognitionStatus == FaceRecognitionStatusEnum.SOLVED) {

            // Obtain Cube Pose from Face Grid information.
            stateModel.cubePose = CubePoseEstimator.poseEstimation(rubikFace, image, stateModel);

            // Process measurement update on Kalman Filter (if it exists).
            KalmanFilter kalmanFilter = stateModel.kalmanFilter;
            if (kalmanFilter != null)
                kalmanFilter.measurementUpdate(stateModel.cubePose, System.currentTimeMillis());

            // Process measurement update on Kalman Filter ALSM (if it exists).
            KalmanFilterALSM kalmanFilterALSM = stateModel.kalmanFilterALSM;
            if (kalmanFilterALSM != null)
                kalmanFilterALSM.measurementUpdate(stateModel.cubePose, System.currentTimeMillis());
        } else {
            stateModel.cubePose = null;
        }
        rubikFace.profiler.markTime(Profiler.Event.POSE);

        /* **********************************************************************
         * **********************************************************************
         * Application State Machine
         * 
         * Will provide user instructions.
         * Will determine when we are on-face and off-face
         * Will determine when we are on-new-face
         * Will change state 
         */
        appStateMachine.onFaceEvent(rubikFace);
        rubikFace.profiler.markTime(Profiler.Event.CONTROLLER);
        rubikFace.profiler.markTime(Profiler.Event.TOTAL);

        // Normal return point.
        stateModel.activeRubikFace = rubikFace;
        return annotation.drawAnnotation(image);

        // =+= Issue: how to get stdio to print as error and not warning in logcat?
    } catch (CvException e) {
        Log.e(Constants.TAG, "CvException: " + e.getMessage());
        e.printStackTrace();
        errorImage = new Mat(imageSize, CvType.CV_8UC4);
        Core.putText(errorImage, "CvException: " + e.getMessage(), new Point(50, 50), Constants.FontFace, 2,
                ColorTileEnum.WHITE.cvColor, 2);
        int i = 1;
        for (StackTraceElement element : e.getStackTrace())
            Core.putText(errorImage, element.toString(), new Point(50, 50 + 50 * i++), Constants.FontFace, 2,
                    ColorTileEnum.WHITE.cvColor, 2);
    } catch (Exception e) {
        Log.e(Constants.TAG, "Exception: " + e.getMessage());
        e.printStackTrace();
        errorImage = new Mat(imageSize, CvType.CV_8UC4);
        Core.putText(errorImage, "Exception: " + e.getMessage(), new Point(50, 50), Constants.FontFace, 2,
                ColorTileEnum.WHITE.cvColor, 2);
        int i = 1;
        for (StackTraceElement element : e.getStackTrace())
            Core.putText(errorImage, element.toString(), new Point(50, 50 + 50 * i++), Constants.FontFace, 2,
                    ColorTileEnum.WHITE.cvColor, 2);
    } catch (Error e) {
        Log.e(Constants.TAG, "Error: " + e.getMessage());
        e.printStackTrace();
        errorImage = new Mat(imageSize, CvType.CV_8UC4);
        Core.putText(errorImage, "Error: " + e.getMessage(), new Point(50, 50), Constants.FontFace, 2,
                ColorTileEnum.WHITE.cvColor, 2);
        int i = 1;
        for (StackTraceElement element : e.getStackTrace())
            Core.putText(errorImage, element.toString(), new Point(50, 50 + 50 * i++), Constants.FontFace, 2,
                    ColorTileEnum.WHITE.cvColor, 2);
    }

    return annotation.drawAnnotation(image);
}
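
Throughout the pipeline each intermediate Mat (greyscale_image, blur_image, canny_image, dilate_image) is released as soon as the next stage has consumed it, which bounds peak native memory to roughly two frames no matter how many stages run. The pattern, reduced to a hedged sketch with illustrative names:

// Sketch of the stage-by-stage release pattern used above.
Mat grey = new Mat();
Imgproc.cvtColor(image, grey, Imgproc.COLOR_BGR2GRAY);
Mat blurred = new Mat();
Imgproc.GaussianBlur(grey, blurred, new Size(7, 7), -1, -1);
grey.release(); // the greyscale stage has been consumed; free it before continuing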