Example usage for org.opencv.core Mat Mat

List of usage examples for org.opencv.core Mat Mat

Introduction

On this page you can find example usages of the org.opencv.core Mat() constructor.

Prototype

public Mat() 

Source Link

Usage

From source file:devices.Video.java

@Override
public void run() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    VideoCapture cap = new VideoCapture(this.id);

    // Give the camera time to initialize before the first read.
    try {
        Thread.sleep(2000);
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt(); // restore the interrupt flag
        Logger.getLogger(Video.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Check if video capturing is enabled
    if (!cap.isOpened()) {
        System.out.println("peta");
        System.exit(-1);
    }

    // Matrix for storing image
    Mat image = new Mat();
    // Frame for displaying image
    MyFrame frame = new MyFrame();
    frame.setVisible(true);

    try {
        // Main loop
        while (true) {
            // BUG FIX: cap.read() never sets 'image' to null, so the old
            // 'image != null' check was always true and a disconnect was
            // never detected. Use the boolean returned by read() plus an
            // emptiness check instead.
            if (cap.read(image) && !image.empty()) {
                frame.render(image);
            } else {
                System.out.println("No captured frame -- camera disconnected");
                break;
            }
        }
    } finally {
        cap.release(); // RESOURCE FIX: free the native capture device
    }
}

From source file:dfmDrone.examples.fitEllipseExample.java

/**
 * Finds the largest red blob in the source image and draws its bounding
 * rectangle (blue) and fitted ellipse (pink) directly onto it.
 *
 * @param sourceImg BGR input image; drawn on in place
 * @return the same Mat with the rectangle/ellipse rendered, or unchanged
 *         when no contour/ellipse could be found
 */
private static Mat findAndDrawEllipse(Mat sourceImg) {
    // Red wraps around the hue axis in HSV, so threshold two ranges
    // (low reds and high reds) and merge the two masks.
    Mat hsvImg = new Mat();
    Imgproc.cvtColor(sourceImg, hsvImg, Imgproc.COLOR_BGR2HSV);
    Mat lower_hue_range = new Mat();
    Mat upper_hue_range = new Mat();
    Core.inRange(hsvImg, new Scalar(0, 100, 45), new Scalar(15, 255, 255), lower_hue_range);
    Core.inRange(hsvImg, new Scalar(160, 100, 45), new Scalar(180, 255, 255), upper_hue_range);
    Mat red_hue_image = new Mat();
    Core.addWeighted(lower_hue_range, 1.0, upper_hue_range, 1.0, 0, red_hue_image);

    // Smooth the mask to suppress speckle noise before contour detection.
    // (Removed unused locals: grayScaleImg, dilateElement, erodeElement.)
    Imgproc.blur(red_hue_image, red_hue_image, new Size(11, 11));

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(red_hue_image, contours, hierarchy, Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_SIMPLE);
    System.out.println("After findcontours");
    // Walk the top-level contours of the hierarchy (debug output only).
    if (hierarchy.size().height > 0 && hierarchy.size().width > 0) {
        for (int idx = 0; idx >= 0; idx = (int) hierarchy.get(0, idx)[0]) {
            System.out.println(idx);
        }
    }

    // Pick the contour whose point matrix covers the largest area.
    MatOfPoint2f contour2f = null;
    for (MatOfPoint contour : contours) {
        if (contour2f == null || contour.size().area() > contour2f.size().area()) {
            contour2f = new MatOfPoint2f(contour.toArray());
        }
    }
    // ROBUSTNESS FIX: with no contours the old code passed null into
    // fitEllipse and threw an uncaught NullPointerException (only
    // CvException was caught). Bail out gracefully instead.
    if (contour2f == null) {
        System.out.println("Ingen ellipse fundet");
        return sourceImg;
    }

    try {
        // BUG FIX: fitEllipse was previously invoked twice in a row with
        // the first (identical) result silently discarded.
        RotatedRect rotatedrect = Imgproc.fitEllipse(contour2f);

        // Approximate the contour with a polygon (2% of arc length tolerance).
        double approxDistance = Imgproc.arcLength(contour2f, true) * 0.02;
        MatOfPoint2f approxCurve = new MatOfPoint2f();
        Imgproc.approxPolyDP(contour2f, approxCurve, approxDistance, true);

        // Convert back to MatOfPoint to compute the bounding rectangle.
        MatOfPoint points = new MatOfPoint(approxCurve.toArray());
        Rect rect = Imgproc.boundingRect(points);

        // Draw enclosing rectangle and the fitted ellipse.
        Imgproc.rectangle(sourceImg, rect.tl(), rect.br(), new Scalar(255, 0, 0), 1, 8, 0);
        Imgproc.ellipse(sourceImg, rotatedrect, new Scalar(255, 192, 203), 4, 8);
    } catch (CvException e) {
        e.printStackTrace();
        System.out.println("Ingen ellipse fundet");
    }
    return sourceImg;
}

From source file:digitalassistant.Panel.java

/**
 * Worker loop for the panel: while {@code curr_image} matches the initial
 * placeholder name, a static image from disk is shown; otherwise webcam
 * frames are streamed into {@code image_label} indefinitely.
 *
 * NOTE(review): the literal "nitial_image" is missing its leading 'i',
 * while the commented-out reset below uses "initial_image" -- confirm
 * whether the placeholder branch can ever be reached.
 * NOTE(review): the camera is opened inside the outer loop and never
 * released, and the inner while(true) never exits -- probable native
 * resource leak; left untouched here.
 */
public void run() {
    try {
        while (true) {

            if (curr_image.equalsIgnoreCase("nitial_image")) {
                // Placeholder branch: show the static startup image from disk.
                ImageIcon icon = new ImageIcon(ImageIO.read(new File(
                        "C:\\Users\\sandeep\\Documents\\NetBeansProjects\\DigitalAssistant\\src\\digitalassistant\\initial_image.jpg")));
                image_label.setIcon(icon);

            } else {
                System.out.println("Hello, OpenCV");

                // Load the native library.
                //System.loadLibrary("opencv_java244");
                //System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
                VideoCapture camera = new VideoCapture(0);
                System.out.println("inage width" + image_label.getWidth());

                // Match the capture resolution to the current label size.
                camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, image_label.getWidth());
                camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, image_label.getHeight());
                Thread.sleep(1000); // give the camera time to initialize
                camera.open(0); //Useless
                if (!camera.isOpened()) {
                    System.out.println("Camera Error");
                } else {
                    System.out.println("Camera OK?");
                }

                Mat frame = new Mat();

                // camera.grab();
                //System.out.println("Frame Grabbed");
                // camera.retrieve(frame);
                //System.out.println("Frame Decoded");
                System.out.println("Frame Obtained");

                /* No difference
                 camera.release();
                 */
                // NOTE(review): printed before any read, so this is the
                // width of the empty Mat (0), not of a captured frame.
                System.out.println("Captured Frame Width " + frame.width());
                // JFrame frame1 = new JFrame("BasicPanel");
                // frame1.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                // DetectFace f = new DetectFace();

                int count = 10; // NOTE(review): never decremented; loop below is endless
                while (true) {
                    // Grab the current frame and paint it onto the label.
                    camera.read(frame);
                    // Core.putText(frame,count+"", new Point(frame.width()/4,frame.height()/4), 3, 2,new Scalar(0, 255, 0),3);

                    //f.face_detect(frame);
                    BufferedImage image = matToBufferedImage(frame);

                    ImageIcon icon = new ImageIcon(image);

                    // Drop cached pixel data so the new frame actually repaints.
                    icon.getImage().flush();

                    image_label.setIcon(icon);
                    // Thread.sleep(500);
                    //count--;

                }
                // camera.release();
                // curr_image = "initial_image";

            }

            //  ImageIcon icon =new ImageIcon(ImageIO.read( new File("C:\\Users\\sandeep\\Documents\\NetBeansProjects\\DigitalAssistant\\src\\digitalassistant\\initial_image.jpg")) );
            // image_label.setIcon(icon);

            // camera.read(frame);
            // Highgui.imwrite("camera.jpg", frame);
            // frame1.setVisible(false);
            // System.out.println("OK");
        }
    } catch (Exception e) {
        // NOTE(review): broad catch; exceptions (including interrupts) are
        // only printed, which silently terminates the worker.
        System.out.println(e);
    }
}

From source file:digitalassistant.Panel.java

/**
 * Captures 15 webcam frames from device 0 and displays each one in a
 * JFrame, refreshing roughly every 100 ms.
 *
 * NOTE(review): a new Panel/content pane is created for every frame;
 * reusing a single panel would be lighter, but rendering behavior is
 * deliberately kept as-is.
 */
public void capture() {
    try {
        System.out.println("Hello, OpenCV");
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        VideoCapture camera = new VideoCapture(0);
        Thread.sleep(1000); // give the camera time to initialize
        camera.open(0); //Useless
        if (!camera.isOpened()) {
            System.out.println("Camera Error");
        } else {
            System.out.println("Camera OK?");
        }

        Mat frame = new Mat();

        System.out.println("Frame Obtained");

        // NOTE: printed before any read -- this is the empty Mat's width (0).
        System.out.println("Captured Frame Width " + frame.width());
        JFrame frame1 = new JFrame("BasicPanel");
        frame1.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        int count = 15;

        try {
            while (count > 0) {
                camera.read(frame);
                frame1.setSize(frame.width(), frame.height());
                Panel panel1 = new Panel(frame);
                frame1.setContentPane(panel1);
                frame1.setVisible(true);
                Thread.sleep(100);
                count--;
            }
        } finally {
            // RESOURCE FIX: release the native camera handle; previously the
            // release call was commented out and the device was never freed.
            camera.release();
        }
    } catch (Exception e) {
        System.out.println(e);
    }
}

From source file:Domain.ImgProcess.java

/**
 * Opens the default camera (device 0), grabs a single frame, releases the
 * camera and returns the frame.
 *
 * @return the captured frame (may be empty if the camera failed to open)
 */
public Mat retornaFrame() {
    System.loadLibrary("opencv_java2410");
    VideoCapture camera = new VideoCapture(0); // initialize with video device #0

    camera.open(0);
    try {
        // The camera needs a moment to initialize before the first read.
        Thread.sleep(100);
    } catch (InterruptedException ex) {
        // BUG FIX: printf treated the exception message as a format string
        // (a '%' in the text would throw); use println instead. Also
        // restore the thread's interrupt flag.
        Thread.currentThread().interrupt();
        System.out.println(ex.getMessage());
    }

    if (camera.isOpened()) {
        System.out.println("Ta aberto");
    }

    Mat frame = new Mat();

    camera.read(frame); // capture the current camera frame
    System.out.println("Frame adquirido.");
    camera.release();
    return frame;

}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using
 * the high resolution image.
 *
 * @param src high-resolution (grayscale) image of the book
 * @return rectangle delineating the book
 */
public Rect highRes(Mat src) {
    // Blur aggressively, Otsu-threshold, then extract edges.
    Mat edges = src.clone();
    Imgproc.blur(src, edges, new Size(100.0, 100.0), new Point(-1, -1), Core.BORDER_REPLICATE);
    Imgproc.threshold(edges, edges, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU);
    Imgproc.Canny(edges, edges, 50, 200, 3, false);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    // Debug view: paint every contour with a heat-map color.
    Mat debug = new Mat();
    Imgproc.cvtColor(src, debug, Imgproc.COLOR_GRAY2BGR);
    int total = contours.size();
    for (int i = 0; i < total; i++) {
        byte[] rgb = ITools.getHeatMapColor((float) i / (float) total);
        Imgproc.drawContours(debug, contours, i, new Scalar(rgb[0], rgb[1], rgb[2]), 8);
    }
    new IViewer("HighRes Contours ", BReaderTools.bufferedImageFromMat(debug));

    // Walk the hierarchy tree looking for the polygon containing the image center.
    Point center = new Point(src.cols() / 2, src.rows() / 2);
    int[] found = polySearch(center, hierarchy, contours, 0);
    while (found[0] != 1 && found[2] != -1) {
        found = polySearch(center, hierarchy, contours, found[2]);
    }

    // Fall back to contour 0 when the search reported no match.
    int index = (found[1] != -1) ? found[1] : 0;
    MatOfInt hull = new MatOfInt();
    Imgproc.convexHull(contours.get(index), hull);

    // Map hull indices back to contour points and take their bounding box.
    MatOfPoint cont = contours.get(index);
    Point[] hullPoints = new Point[hull.rows()];
    for (int i = 0; i < hull.rows(); i++) {
        hullPoints[i] = new Point(cont.get((int) hull.get(i, 0)[0], 0));
    }
    return Imgproc.boundingRect(new MatOfPoint(hullPoints));
}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Finds the bounding box for the book on the stand using 
 * the depth average image./*from   ww  w .  j  a v a2  s. c om*/
 * @param src- The Depth average image
 * @return Rectangle delineating the book
 */
public Rect lowResDist(Mat src) {
    Mat dst = src.clone();

    Imgproc.blur(src, dst, new Size(5, 5), new Point(-1, -1), Core.BORDER_REPLICATE);
    //      Imgproc.threshold(dst, dst, 0,255,Imgproc.THRESH_BINARY_INV+Imgproc.THRESH_OTSU);
    Imgproc.Canny(dst, dst, 50, 200, 3, false);
    //      Canny(src, dst, 20, 60, 3);

    List<MatOfPoint> contours = new LinkedList<>();
    Mat hierarchy = new Mat();
    /// Find contours
    Imgproc.findContours(dst, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

    Mat color = new Mat();
    Imgproc.cvtColor(src, color, Imgproc.COLOR_GRAY2BGR);
    for (int k = 0; k < contours.size(); k++) {
        byte[] vals = ITools.getHeatMapColor((float) k / (float) contours.size());
        Imgproc.drawContours(color, contours, k, new Scalar(vals[0], vals[1], vals[2]), 1);
    }
    new IViewer("LowRes Contours ", BReaderTools.bufferedImageFromMat(color));

    for (int k = 0; k < contours.size(); k++) {
        MatOfPoint2f tMat = new MatOfPoint2f();
        Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(k).toArray()), tMat, 5, true);
        contours.set(k, new MatOfPoint(tMat.toArray()));
    }

    List<Point> points = new LinkedList<Point>();
    for (int i = 0; i < contours.size(); i++) {
        points.addAll(contours.get(i).toList());
    }

    MatOfInt tHull = new MatOfInt();
    Imgproc.convexHull(new MatOfPoint(points.toArray(new Point[points.size()])), tHull);

    //get bounding box
    Point[] tHullPoints = new Point[tHull.rows()];
    for (int i = 0; i < tHull.rows(); i++) {
        int pIndex = (int) tHull.get(i, 0)[0];
        tHullPoints[i] = points.get(pIndex);
    }
    Rect out = Imgproc.boundingRect(new MatOfPoint(tHullPoints));
    return out;
}

From source file:edu.fiu.cate.breader.BaseSegmentation.java

/**
 * Capture button has been pressed. Obtains the high resolution image and the
 * low resolution (depth) data, finds the book's bounding box in both, then
 * crops, fold-corrects, extension-corrects, displays, OCRs and saves the
 * result. The two boxes are aligned by scaling the low-res box to the
 * high-res aspect.
 *
 * NOTE(review): both bounding-box computations swallow every exception and
 * fall through to the null checks below -- deliberate best-effort, kept.
 */
public void captureEvent() {
    long t0, t1;
    t0 = System.currentTimeMillis();
    t1 = t0;
    // Acquire the high-resolution image (channels x rows x cols byte cube
    // as used by ITools.crop below).
    byte[][][] img = getHidefImage();
    System.out.println("HiRez Capture: " + (System.currentTimeMillis() - t0) / 1000.0);
    new IViewer("HiRez", ImageManipulation.getBufferedImage(img));

    // Bounding box on the grayscale high-res image; null on any failure.
    t0 = System.currentTimeMillis();
    Rect bound = null;
    try {
        bound = highRes(BReaderTools.byteArrayToMat(ITools.toGrayscale(img)));
    } catch (java.lang.Exception e) {
    }
    System.out.println("First bounding box: " + (System.currentTimeMillis() - t0) / 1000.0);

    //      Mat imgMat = BReaderTools.byteArrayToMat(img);
    //      Imgproc.rectangle(imgMat, bound.tl(), bound.br(), new Scalar(255,255,0), 8);

    // Bounding box on the normalized low-res depth image; null on failure.
    byte[][] low = ITools.normalize(normImgCropped);
    t0 = System.currentTimeMillis();
    Rect boundLow = null;
    try {
        boundLow = lowResDist(BReaderTools.byteArrayToMat(low));
    } catch (java.lang.Exception e) {
    }
    System.out.println("second bounding box: " + (System.currentTimeMillis() - t0) / 1000.0);

    // Either detection failed: ask the user to realign and bail out.
    if (bound == null || boundLow == null) {
        tts.doTTS("Document outside field of view. Please realign and press capture again.");
        return;
    }

    // Require a 100-px margin inside the high-res image.
    if ((bound.x + bound.width + 100) >= img[0][0].length || (bound.y + bound.height + 100) >= img[0].length) {
        tts.doTTS("Document outside field of view. Please realign and press capture again.");
        return;
    }

    //Show the cropped height map with the bounding box
    Mat color = new Mat();
    Imgproc.cvtColor(BReaderTools.byteArrayToMat(low), color, Imgproc.COLOR_GRAY2BGR);
    Imgproc.rectangle(color, boundLow.tl(), boundLow.br(), new Scalar(255, 255, 0), 1);
    new IViewer("LowRes Bounding Box", BReaderTools.bufferedImageFromMat(color));

    Imgproc.cvtColor(BReaderTools.byteArrayToMat(ITools.toGrayscale(img)), color, Imgproc.COLOR_GRAY2BGR);
    Imgproc.rectangle(color, bound.tl(), bound.br(), new Scalar(255, 255, 0), 8);
    new IViewer("HighRes Bounding Box", BReaderTools.bufferedImageFromMat(color));

    // Scale factor between the two boxes: use the smaller of the width/height
    // ratios, grow the other dimension of the low-res box to match the
    // high-res aspect, and center the grown box (xO/yO offsets).
    double rW = (double) bound.width / (double) boundLow.width;
    double rH = (double) bound.height / (double) boundLow.height;
    int h = 0, w = 0, yO = 0, xO = 0;
    double s = 0;

    if (rH < rW) {
        s = rH;
        h = boundLow.height;
        w = (int) (bound.width / rH);
        if ((w - boundLow.width) % 2 == 0) {
            xO = (boundLow.width - w) / 2;
        }
    } else {
        s = rW;
        h = (int) (bound.height / rW);
        w = boundLow.width;
        if ((h - boundLow.height) % 2 == 0) {
            yO = (boundLow.height - h) / 2;
        }
    }

    // Crop every channel of the high-res image to the bounding box.
    byte[][][] hiRez = new byte[img.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < img.length; i++) {
        hiRez[i] = ITools.crop(bound.x, bound.y, bound.x + bound.width, bound.y + bound.height, img[i]);
    }
    System.out.println("Cropping HiRez: " + (System.currentTimeMillis() - t0) / 1000.0);

    //Crop the distance image and prepare for correction.
    // The depth map is upscaled by 's' using the user-selected interpolation
    // method, then cropped to the high-res bounding box. The +40/+25 offsets
    // presumably compensate a fixed sensor misalignment -- TODO confirm.
    float[][] distRez;
    Mat destRezM = new Mat();
    switch (disp.getInterpolationMethod()) {
    case 1:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_LINEAR);//resize image
        break;
    case 2:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_CUBIC);//resize image
        break;
    case 3:
        Imgproc.resize(BReaderTools.floatArrayToMat(normImg), destRezM, new Size(0, 0), s, s,
                Imgproc.INTER_LANCZOS4);//resize image
        break;
    }
    distRez = BReaderTools.matToFloatArray(destRezM);
    int xCentOff = (img[0][0].length - bound.width) / 2 - bound.x;
    int yCentOff = (img[0].length - bound.height) / 2 - bound.y;
    int x0 = (int) ((boundLow.x + xO + 40) * s), y0 = (int) ((boundLow.y + yO + 25) * s);
    distRez = ITools.crop(x0, y0, x0 + bound.width, y0 + bound.height, distRez);
    distRez = multiply(distRez, -100);

    // Fold correction of every channel using the depth map.
    byte[][][] foldCorrected = new byte[hiRez.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < hiRez.length; i++) {
        foldCorrected[i] = BReaderTools.foldCorrection(hiRez[i], distRez, xCentOff, yCentOff);
    }
    System.out.println("Fold Correction: " + (System.currentTimeMillis() - t0) / 1000.0);

    // NOTE(review): distRezPushed is computed but never used below;
    // presumably kept for the commented-out viewers -- confirm before removal.
    float[][] distRezPushed = BReaderTools.foldCorrection(distRez,
            (distRez[0].length - boundLow.width) / 2 - boundLow.x,
            (distRez.length - boundLow.height) / 2 - boundLow.y);

    // Page-extension correction on top of the fold-corrected channels.
    byte[][][] extensionCorrected = new byte[hiRez.length][][];
    t0 = System.currentTimeMillis();
    for (int i = 0; i < hiRez.length; i++) {
        extensionCorrected[i] = LuWang.extentionWithLinearInterpolation(foldCorrected[i], distRez);
    }
    System.out.println("Extension Correction: " + (System.currentTimeMillis() - t0) / 1000.0);

    new IViewer("Heigths", ImageManipulation.getGrayBufferedImage(ITools.normalize(distRez)));
    new IViewer("HiRez", ImageManipulation.getBufferedImage(hiRez));
    System.out.println("Overall time: " + (System.currentTimeMillis() - t1) / 1000.0);

    // Timestamp for the output filenames.
    SimpleDateFormat format = new SimpleDateFormat("YYYY-MM-dd-hh-mm-ss");
    String time = format.format(new Date(System.currentTimeMillis()));

    // Save the image variant selected by the correction method:
    // 1 = uncorrected, 2 = fold-corrected, 3 = fold + extension corrected.
    String imgPath = saveDir + "/correctedImage-" + time + ".tiff";
    switch (disp.getCorrectionMethod()) {
    case 1: {
        ImageManipulation.writeImage(hiRez, imgPath);
        new IViewer("Correction Results: None", ImageManipulation.getBufferedImage(hiRez));
    }
        break;
    case 2: {
        ImageManipulation.writeImage(foldCorrected, imgPath);
        new IViewer("Correction Results: Flattening", ImageManipulation.getBufferedImage(foldCorrected));
    }
        break;
    case 3: {
        ImageManipulation.writeImage(extensionCorrected, imgPath);
        new IViewer("Correction Results: Flattening + Extension",
                ImageManipulation.getBufferedImage(extensionCorrected));
    }
        break;
    }

    // OCR the saved image and read the result aloud; ABBYY throws NPE when
    // its license has expired.
    try {
        String text = abbyy.processImage(imgPath, saveDir + "/text-" + time + ".txt");
        System.out.println("Done!!!!");
        tts.doTTS(text);
    } catch (java.lang.NullPointerException e) {
        tts.doTTS("ABBYY License expired.");
    }
    saveData(time, img, hiRez, distRez, boundLow, bound);

}

From source file:edu.sfsu.cs.orange.ocr.OcrRecognizeAsyncTask.java

License:Apache License

@Override
protected Boolean doInBackground(Void... arg0) {
    // OCR background task: binarize the camera frame, run Tesseract,
    // collect confidences/bounding boxes, clean up the text and parse it.
    long start = System.currentTimeMillis();
    Bitmap bitmap = activity.getCameraManager().buildLuminanceSource(data, width, height)
            .renderCroppedGreyscaleBitmap();

    String textResult;
    // Several working copies of the same bitmap; 'background', 'finalimage'
    // and 'skewed' below are currently unused (test scaffolding).
    Mat image = new Mat();
    Utils.bitmapToMat(bitmap, image);
    Mat gray = new Mat();
    Utils.bitmapToMat(bitmap, gray);

    Mat background = new Mat();
    Utils.bitmapToMat(bitmap, background); //to test with BinarizeBG
    Mat finalimage = new Mat();
    Utils.bitmapToMat(bitmap, finalimage);

    //image.convertTo( gray,CvType.CV_8UC1);
    //image.convertTo(image,CvType.CV_64F);
    try {
        // Binarize via the native Shafait implementation; 'image' holds the
        // result. Debug copies are written to DCIM.
        Imgcodecs.imwrite("/storage/emulated/0/DCIM/orig.jpg", image);
        OpencvNativeClass.BinarizeShafait(gray.getNativeObjAddr(), image.getNativeObjAddr());

        Imgcodecs.imwrite("/storage/emulated/0/DCIM/binarized.jpg", image);
        Utils.matToBitmap(image, bitmap);

        //Pix fimage = ReadFile.readBitmap(bitmap);
        //fimage = Binarize.otsuAdaptiveThreshold(fimage);

        //float angle = Skew.findSkew(fimage);
        //Log.i("Skew: ", Float.toString(angle));
        //double deg2rad = 3.14159265 / 180.;

        //fimage = Rotate.rotate(fimage, angle);

        //bitmap = WriteFile.writeBitmap(fimage);

        // NOTE(review): unused; remnant of the disabled deskew path above.
        Mat skewed = new Mat();

        //Utils.bitmapToMat(bitmap,skewed);
        //Imgcodecs.imwrite("/storage/emulated/0/DCIM/deskewed.jpg", skewed);

        // Hand the binarized bitmap to Tesseract.
        baseApi.setImage(ReadFile.readBitmap(bitmap));

        textResult = baseApi.getUTF8Text();
        timeRequired = System.currentTimeMillis() - start;

        // Check for failure to recognize text
        if (textResult == null || textResult.equals("")) {
            return false;
        }

        // Collect confidences and bounding boxes at every granularity.
        ocrResult = new OcrResult();
        ocrResult.setWordConfidences(baseApi.wordConfidences());
        ocrResult.setMeanConfidence(baseApi.meanConfidence());
        ocrResult.setRegionBoundingBoxes(baseApi.getRegions().getBoxRects());
        ocrResult.setTextlineBoundingBoxes(baseApi.getTextlines().getBoxRects());
        ocrResult.setWordBoundingBoxes(baseApi.getWords().getBoxRects());
        ocrResult.setStripBoundingBoxes(baseApi.getStrips().getBoxRects());

        // Iterate through the results at symbol level to collect per-character boxes.
        final ResultIterator iterator = baseApi.getResultIterator();
        int[] lastBoundingBox;
        ArrayList<Rect> charBoxes = new ArrayList<Rect>();
        iterator.begin();
        do {
            lastBoundingBox = iterator.getBoundingBox(PageIteratorLevel.RIL_SYMBOL);
            Rect lastRectBox = new Rect(lastBoundingBox[0], lastBoundingBox[1], lastBoundingBox[2],
                    lastBoundingBox[3]);
            charBoxes.add(lastRectBox);
        } while (iterator.next(PageIteratorLevel.RIL_SYMBOL));
        iterator.delete();
        ocrResult.setCharacterBoundingBoxes(charBoxes);

    } catch (RuntimeException e) {
        Log.e("OcrRecognizeAsyncTask",
                "Caught RuntimeException in request to Tesseract. Setting state to CONTINUOUS_STOPPED.");
        e.printStackTrace();
        try {
            baseApi.clear();
            activity.stopHandler();
        } catch (NullPointerException e1) {
            // Continue
        }
        return false;
    }
    timeRequired = System.currentTimeMillis() - start;
    ocrResult.setBitmap(bitmap);
    // Rebuild textResult dropping empty lines; no trailing newline on the last line.
    String[] temp = textResult.split("\n");
    if (temp.length != 0)
        textResult = "";
    for (int i = 0; i < temp.length; i++) {
        if (temp[i].length() != 0) {
            if (i < temp.length - 1) {
                textResult = textResult + temp[i] + "\n";
            } else
                textResult = textResult + temp[i];
        }
    }
    // Native address parser extracts the structured result.
    String textResult2 = ParsingNativeClass.ParseAddress(textResult);
    Log.d("Return parsing", textResult2);
    ocrResult.setViewtext(textResult);
    ocrResult.setText(textResult2);
    ocrResult.setRecognitionTimeRequired(timeRequired);
    return true;
}

From source file:edu.soict.hust.k57.mmdb.components.HistogramCaculator.java

/**
 * Computes min-max-normalized per-channel (B, G, R) histograms for the
 * image file referenced by the given entry and stores them back on it.
 *
 * @param t image entry providing the source file and the bin count
 */
@Override
public void accept(ImgEnt t) {
    // NOTE(review): loading the native library on every call is redundant
    // (a static initializer would do it once) but harmless; kept so the
    // method stays self-contained. Removed unused 'Mat.eye' test matrix.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat m = Imgcodecs.imread(t.getF().getPath());
    List<Mat> images = new ArrayList<Mat>();
    Core.split(m, images); // one single-channel Mat per color channel (B, G, R)

    MatOfInt histSize = new MatOfInt(t.getBin()); // number of histogram bins
    MatOfInt channels = new MatOfInt(0); // channel index (each split Mat is single-channel)
    MatOfFloat histRange = new MatOfFloat(0, 256); // full 8-bit intensity range

    Mat bHist = new Mat();
    Mat gHist = new Mat();
    Mat rHist = new Mat();

    // Compute and normalize each channel's histogram to [0, 1].
    Imgproc.calcHist(images.subList(0, 1), channels, new Mat(), bHist, histSize, histRange, false);
    Core.normalize(bHist, bHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    Imgproc.calcHist(images.subList(1, 2), channels, new Mat(), gHist, histSize, histRange, false);
    Core.normalize(gHist, gHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    Imgproc.calcHist(images.subList(2, 3), channels, new Mat(), rHist, histSize, histRange, false);
    Core.normalize(rHist, rHist, 0, 1, Core.NORM_MINMAX, -1, new Mat());
    t.setbHistogram(bHist);
    t.setgHistogram(gHist);
    t.setrHistogram(rHist);
}