Example usage for org.opencv.core Mat getNativeObjAddr

Introduction

On this page you can find usage examples for org.opencv.core Mat getNativeObjAddr, collected from open-source projects.

Prototype

public long getNativeObjAddr() 
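
The address returned by getNativeObjAddr() is the pointer to the underlying native cv::Mat, which is typically passed through JNI so that C++ code can work on the same image buffer without copying. A minimal sketch of that pattern follows; the library name "native-lib" and the nativeProcess method are assumptions for illustration, not part of OpenCV.

import org.opencv.core.Mat;

public final class NativeBridge {

    static {
        // Hypothetical application library containing the JNI implementation.
        System.loadLibrary("native-lib");
    }

    // Hypothetical native method; on the C++ side the long is cast back to a cv::Mat*.
    private static native void nativeProcess(long matAddr);

    // Hand the native cv::Mat to JNI code without copying pixel data.
    public static void process(Mat mat) {
        nativeProcess(mat.getNativeObjAddr());
    }
}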

Usage

From source file:at.ac.tuwien.caa.docscan.camera.NativeWrapper.java

License:Open Source License

/**
 * Returns an array of Patch objects, containing focus measurement results.
 * @param src OpenCV Mat
 * @return array of Patch objects
 */
public static Patch[] getFocusMeasures(Mat src) {

    Patch[] patches = nativeGetFocusMeasures(src.getNativeObjAddr());

    return patches;

}
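
The native counterpart is not shown on this page; judging from the call above, its declaration presumably looks like the sketch below (the exact signature is an assumption).

// Presumed declaration: the long parameter is the address returned by
// src.getNativeObjAddr(), and the JNI side builds the Patch array.
private static native Patch[] nativeGetFocusMeasures(long srcAddr);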

From source file:at.ac.tuwien.caa.docscan.camera.NativeWrapper.java

License:Open Source License

/**
 * Returns an array of DkPolyRect objects, containing page segmentation results.
 * @param src OpenCV Mat
 * @return array of DkPolyRect objects
 */
public static DkPolyRect[] getPageSegmentation(Mat src) {

    DkPolyRect[] rects = nativeGetPageSegmentation(src.getNativeObjAddr(), mUseLab, mOldRect);

    if (rects.length > 0)
        mOldRect = rects[0];
    else
        mOldRect = new DkPolyRect();

    return rects;
}

From source file:at.ac.tuwien.caa.docscan.camera.NativeWrapper.java

License:Open Source License

public static double getIllumination(Mat src, DkPolyRect polyRect) {
    return nativeGetIllumination(src.getNativeObjAddr(), polyRect);
}

From source file:com.crea_si.eviacam.service.MainEngine.java

License:Open Source License

/**
 * Process incoming camera frames
 *
 * Remarks: this method is called from a secondary thread
 *
 * @param rgba opencv matrix with the captured image
 */
@Override
public void processFrame(Mat rgba) {
    // For these states do nothing
    if (mCurrentState == STATE_DISABLED || mCurrentState == STATE_STOPPED)
        return;

    /*
     * When the screen is off, make sure the engine is in standby mode to reduce CPU usage
     */
    if (!mPowerManagement.getScreenOn()) {
        if (mCurrentState != STATE_PAUSED && mCurrentState != STATE_STANDBY) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    standby();
                }
            });
        }
        mPowerManagement.sleep();
    }

    /* Here the engine is in RUNNING or STANDBY state */

    int pictRotation = mOrientationManager.getPictureRotation();

    // set preview rotation
    mCameraListener.setPreviewRotation(pictRotation);

    if (mCurrentState == STATE_PAUSED)
        return;

    /*
     * call jni part to track face
     */
    mMotion.x = mMotion.y = 0.0f;
    boolean faceDetected = VisionPipeline.processFrame(rgba.getNativeObjAddr(), pictRotation, mMotion);

    /*
     * Check whether the engine needs to be paused/resumed according
     * to the face detection status
     */
    if (faceDetected) {
        mFaceDetectionCountdown.start();
        if (mCurrentState == STATE_STANDBY) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    resume();
                }
            });

            /* Yield CPU to the main thread so that it has the opportunity
             * to run and change the engine state before this thread continues
             * running.
             * Remarks: tried Thread.yield() without success
             */
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                /* do nothing */ }
        }
    }

    if (mCurrentState == STATE_STANDBY)
        return;

    if (mFaceDetectionCountdown.hasFinished() && !mFaceDetectionCountdown.isDisabled()) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                Resources res = mService.getResources();
                String t = String.format(res.getString(R.string.pointer_stopped_toast),
                        Preferences.getTimeWithoutDetectionEntryValue(mService));
                EVIACAM.LongToast(mService, t);

                standby();
            }
        });
    }

    // Provide feedback through the camera viewer
    mCameraLayerView.updateFaceDetectorStatus(mFaceDetectionCountdown);

    // compensate for the mirror effect
    mMotion.x = -mMotion.x;

    // process motion on specific engine
    mMotionProcessor.processMotion(mMotion);
}
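
VisionPipeline.processFrame is the native entry point of this pipeline. Its declaration is not shown here; a plausible signature, inferred from the call above, could look as follows (the parameter types, in particular the type of mMotion, are assumptions).

// Inferred from the call site: the Mat address, the picture rotation, and a
// point that the native code fills with the detected motion; returns whether
// a face was detected in the frame.
public static native boolean processFrame(long matAddr, int rotation, PointF motion);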

From source file:com.nekomeshi312.whiteboardcorrection.DetectLines.java

License:Open Source License

public int lineDetect(Mat inputImg, boolean isYUV, Mat outputImg, int lines[]) {
    if (!mIsInit)
        return -1;
    return detectLines(inputImg.getNativeObjAddr(), isYUV, outputImg == null ? 0 : outputImg.getNativeObjAddr(),
            lines);
}
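
Note that the output Mat is optional here: when outputImg is null, the address 0 is passed so the native side can skip writing an output image. A sketch of the presumed native declaration, inferred from the call above (the exact signature is an assumption):

// Presumed declaration: lines receives the detected line endpoints, and an
// address of 0 for outputImgAddr means "no output image requested".
private native int detectLines(long inputImgAddr, boolean isYUV, long outputImgAddr, int[] lines);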

From source file:com.projectcs2103t.openglestest.OpenGLES20Activity.java

License:Apache License

private void initNative() {
    String secStore = System.getenv("EXTERNAL_STORAGE");//internal sd
    System.out.println("external storage:" + secStore);
    String modelFile = secStore + "/virtual/model.ply";
    File f = new File(modelFile);
    System.out.println("file existence = " + f.exists());
    System.out.println("file readable = " + f.canRead());

    nl = new NativeLinker();
    nl.loadModel(modelFile);
    try {
        Mat patternImg = Utils.loadResource(OpenGLES20Activity.this, R.drawable.pattern1, 1);
        nl.loadPattern(patternImg.getNativeObjAddr());
        patternImg = Utils.loadResource(OpenGLES20Activity.this, R.drawable.pattern2, 1);
        nl.loadPattern(patternImg.getNativeObjAddr());
        patternImg = Utils.loadResource(OpenGLES20Activity.this, R.drawable.pattern3, 1);
        nl.loadPattern(patternImg.getNativeObjAddr());
    } catch (Exception e) {
    }
}

From source file:com.projectcs2103t.openglestest.OpenGLES20Activity.java

License:Apache License

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    float projection[] = mCameraProjectionAdapter.getProjectionGL();
    Mat CameraMat = mCameraProjectionAdapter.getCVCameraMat();
    Mat DistortionMat = mCameraProjectionAdapter.getCVDistortionMat();
    Mat ModelViewMat = new Mat(4, 4, CvType.CV_64FC1);
    int detected = nl.processFrame(rgba.getNativeObjAddr(), CameraMat.getNativeObjAddr(),
            DistortionMat.getNativeObjAddr(), ModelViewMat.getNativeObjAddr());
    float mGLModelView[] = null;
    if (detected == 1) {
        mGLModelView = new float[16];
        mGLModelView[0] = (float) ModelViewMat.get(0, 0)[0];
        mGLModelView[1] = (float) ModelViewMat.get(0, 1)[0];
        mGLModelView[2] = (float) ModelViewMat.get(0, 2)[0];
        mGLModelView[3] = (float) ModelViewMat.get(0, 3)[0];
        mGLModelView[4] = (float) ModelViewMat.get(1, 0)[0];
        mGLModelView[5] = (float) ModelViewMat.get(1, 1)[0];
        mGLModelView[6] = (float) ModelViewMat.get(1, 2)[0];
        mGLModelView[7] = (float) ModelViewMat.get(1, 3)[0];
        mGLModelView[8] = (float) ModelViewMat.get(2, 0)[0];
        mGLModelView[9] = (float) ModelViewMat.get(2, 1)[0];
        mGLModelView[10] = (float) ModelViewMat.get(2, 2)[0];
        mGLModelView[11] = (float) ModelViewMat.get(2, 3)[0];
        mGLModelView[12] = (float) ModelViewMat.get(3, 0)[0];
        mGLModelView[13] = (float) ModelViewMat.get(3, 1)[0];
        mGLModelView[14] = (float) ModelViewMat.get(3, 2)[0];
        mGLModelView[15] = (float) ModelViewMat.get(3, 3)[0];
        //showMatrices(rgba, ModelViewMat);
    }
    mCameraProjectionAdapter.setModelViewGL(mGLModelView);
    Imgproc.putText(rgba, mCameraProjectionAdapter.toString(), new Point(50, 50), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    Imgproc.putText(rgba, mGLView.toString(), new Point(50, 75), Core.FONT_HERSHEY_PLAIN, 1.0,
            new Scalar(0, 255, 0));
    return rgba;
}
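
The element-by-element copy of the 4x4 ModelViewMat into the float array could equally be written as a loop; a short sketch with the same behavior:

// Equivalent loop form of the copy above; Mat.get(row, col) returns a
// double[] with a single element for a CV_64FC1 matrix.
mGLModelView = new float[16];
for (int row = 0; row < 4; row++) {
    for (int col = 0; col < 4; col++) {
        mGLModelView[row * 4 + col] = (float) ModelViewMat.get(row, col)[0];
    }
}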

From source file:com.team.formal.eyeshopping.ActivityFindingResults.java

License:Open Source License

private void callCloudVision(final Bitmap bitmap) throws IOException {
    // Do the real work in an async task, because we need to use the network anyway
    new AsyncTask<Object, Void, ArrayList<String>>() {
        final ProgressDialog asyncDialog = new ProgressDialog(ActivityFindingResults.this);

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            asyncDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
            asyncDialog.setMessage("Loading Products ...");
            asyncDialog.show();
        }

        @Override
        protected ArrayList<String> doInBackground(Object... params) {
            try {
                HttpTransport httpTransport = AndroidHttp.newCompatibleTransport();
                JsonFactory jsonFactory = GsonFactory.getDefaultInstance();

                VisionRequestInitializer requestInitializer = new VisionRequestInitializer(
                        CLOUD_VISION_API_KEY) {
                    /**
                     * We override this so we can inject important identifying fields into the HTTP
                     * headers. This enables use of a restricted cloud platform API key.
                     */
                    @Override
                    protected void initializeVisionRequest(VisionRequest<?> visionRequest) throws IOException {
                        super.initializeVisionRequest(visionRequest);

                        String packageName = getPackageName();
                        visionRequest.getRequestHeaders().set(ANDROID_PACKAGE_HEADER, packageName);

                        String sig = PackageManagerUtils.getSignature(getPackageManager(), packageName);

                        visionRequest.getRequestHeaders().set(ANDROID_CERT_HEADER, sig);
                    }
                };

                Vision.Builder builder = new Vision.Builder(httpTransport, jsonFactory, null);
                builder.setVisionRequestInitializer(requestInitializer);

                Vision vision = builder.build();

                BatchAnnotateImagesRequest batchAnnotateImagesRequest = new BatchAnnotateImagesRequest();
                batchAnnotateImagesRequest.setRequests(new ArrayList<AnnotateImageRequest>() {
                    {
                        AnnotateImageRequest annotateImageRequest = new AnnotateImageRequest();

                        // Add the image
                        Image base64EncodedImage = new Image();
                        // Convert the bitmap to a JPEG
                        // Just in case it's a format that Android understands but Cloud Vision doesn't
                        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                        bitmap.compress(Bitmap.CompressFormat.JPEG, 90, byteArrayOutputStream);
                        byte[] imageBytes = byteArrayOutputStream.toByteArray();

                        // Base64 encode the JPEG
                        base64EncodedImage.encodeContent(imageBytes);
                        annotateImageRequest.setImage(base64EncodedImage);
                        // add the features we want
                        annotateImageRequest.setFeatures(new ArrayList<Feature>() {
                            {
                                Feature webDetection = new Feature();
                                webDetection.setType("WEB_DETECTION");
                                webDetection.setMaxResults(20);
                                add(webDetection);
                            }
                        });

                        // Add the list of one thing to the request
                        add(annotateImageRequest);
                    }
                });

                Vision.Images.Annotate annotateRequest = vision.images().annotate(batchAnnotateImagesRequest);
                // Due to a bug: requests to Vision API containing large images fail when GZipped.
                annotateRequest.setDisableGZipContent(true);
                Log.d(TAG, "created Cloud Vision request object, sending request");

                BatchAnnotateImagesResponse response = annotateRequest.execute();

                return convertResponseToString(response);
            } catch (GoogleJsonResponseException e) {
                Log.d(TAG, "failed to make API request because " + e.getContent());
            } catch (IOException e) {
                Log.d(TAG, "failed to make API request because of other IOException " + e.getMessage());
            }
            return null;
        }

        @Override
        protected void onPostExecute(ArrayList<String> urls) {
            super.onPostExecute(urls);
            // The request may have failed, in which case there is nothing to process.
            if (urls == null) {
                asyncDialog.dismiss();
                return;
            }

            // Log the URLs returned by the Cloud Vision web detection.
            for (int i = 0; i < urls.size(); i++) {
                Log.d("pages", urls.get(i));
            }
            ArrayList<String[]> parsedUrl = urlParsing(urls);
            Map<String, Integer> map = new HashMap<String, Integer>();

            // 1. Count how often each token from the parsed URLs appears
            for (int i = 0; i < parsedUrl.size(); i++) {
                for (int j = 0; j < parsedUrl.get(i).length; j++) {
                    System.out.println(parsedUrl.get(i)[j]);
                    Integer count = map.get(parsedUrl.get(i)[j]);
                    map.put(parsedUrl.get(i)[j], (count == null) ? 1 : count + 1);
                }
                System.out.println("");
            }

            // Collect keywords that appear often enough, build keyword
            // combinations from them, and query the Naver Shop API with
            // those combinations.
            ArrayList<String> rankCount = new ArrayList<>();
            ArrayList<ArrayList<String>> resultArr = new ArrayList<ArrayList<String>>();

            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                if (entry.getValue() >= 4) {
                    System.out.println("keyword : " + entry.getKey() + " Count : " + entry.getValue());
                    rankCount.add(entry.getKey());
                }
            }

            final ArrayList<String> coreKeywords = new ArrayList<>();

            if (!rankCount.isEmpty()) {
                for (int k = 0; k < 7; k++) {
                    int randomCount = randomRange(1, rankCount.size());
                    boolean checkDuplicate[] = new boolean[rankCount.size()];
                    String combiw = "";
                    for (int i = 0; i < randomCount; i++) {
                        int rand1;
                        while (checkDuplicate[(rand1 = randomRange(0, rankCount.size() - 1))]) {
                        }
                        combiw += rankCount.get(rand1) + "%20";
                        Log.d("combi", combiw);
                        checkDuplicate[rand1] = true;
                    }
                    coreKeywords.add(combiw);
                }
                for (int i = 0; i < coreKeywords.size(); i++) {
                    Log.d("coreKey", coreKeywords.get(i));
                }
                //                    for (String arr[] : parsedUrl) {
                //                        int count = 0;
                //                        boolean check[] = new boolean[arr.length];
                //                        ArrayList<String> strArr = new ArrayList<>();
                //
                //                        for (int i = 0; i < arr.length; i++) {
                //                            for (String rank : rankCount) {
                //                                if (arr[i].equals(rank)) {
                //                                    check[i] = true;
                //                                    strArr.add(arr[i]);
                //                                    count++;
                //                                }
                //                            }
                //                            Log.d("strArr", strArr.toString());
                //                        }
                //
                //                        int rand;
                //                        int randSize = randomRange(1, arr.length - count);
                //                        for (int i = 0; i < randSize; i++) {
                //                            while (check[(rand = randomRange(0, arr.length - 1))]) {
                //                            }
                //                            strArr.add(arr[rand]);
                //                            check[rand] = true;
                //                        }
                //                        resultArr.add(strArr);
                //                        Log.d("raa", resultArr.toString());
                //                    }
            } // end of isEmpty()

            new AsyncTask<Object, Void, List<Shop>>() {
                @Override
                protected List<Shop> doInBackground(Object... params) {
                    List<Shop> results = new ArrayList<>();

                    if (results.size() > 5)
                        results = results.subList(0, 5);

                    for (int i = 0; i < coreKeywords.size(); i++) {
                        System.out.println(coreKeywords.get(i).toString().replaceAll(", ", "%20"));
                        Log.d("uri", coreKeywords.get(i).toString().replaceAll(", ", "%20"));

                        final String xmlRaw = coreKeywords.get(i).toString().replaceAll(", ", "%20");

                        // 1. Build and send the Naver Shop search request
                        URL url = null;
                        try {
                            url = new URL("https://openapi.naver.com/v1/search/shop.xml?query=" + xmlRaw
                                    + "&display=50");
                        } catch (MalformedURLException e) {
                            e.printStackTrace();
                        }

                        HttpURLConnection urlConnection = null;
                        try {
                            urlConnection = (HttpURLConnection) url.openConnection();
                            urlConnection.setRequestProperty("X-Naver-Client-ID", clientID);
                            urlConnection.setRequestProperty("X-Naver-Client-Secret", clientSecret);
                            urlConnection.setRequestProperty("User-Agent", "Mozilla/5.0 ( compatible ) ");
                            urlConnection.setRequestProperty("Accept", "*/*");
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        InputStream in = null;
                        try {
                            in = new BufferedInputStream(urlConnection.getInputStream());
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        String data = "";
                        String msg = null;

                        BufferedReader br = null;
                        try {
                            if (in != null) {
                                br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
                            }
                        } catch (UnsupportedEncodingException e) {
                            e.printStackTrace();
                        }

                        try {
                            if (br != null) {
                                while ((msg = br.readLine()) != null) {
                                    data += msg;
                                }
                            }
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        Log.i("msg of br: ", data);

                        // 2. Parse the XML response and filter the results by feature matching
                        String shopResult = data;
                        try {
                            List<Shop> parsingResult = parsingShopResultXml(shopResult);
                            if (parsingResult.size() > 5)
                                parsingResult = parsingResult.subList(0, 5);

                            for (final Shop shop : parsingResult) {
                                Bitmap thumbImg = getBitmapFromURL(shop.getImage());
                                if (thumbImg != null) {
                                    ArrayList<String> keywords = new ArrayList<>(Arrays.asList(coreKeywords
                                            .get(i).replace("[", "").replace("]", "").split("%20")));
                                    String combinationKeyword = coreKeywords.get(i).replace("[", "")
                                            .replace("]", "").replaceAll("%20", " ");

                                    shop.setThumbBmp(thumbImg); // thumbnail bitmap loaded from the shop's image URL
                                    shop.setCombinationKeyword(combinationKeyword);
                                    shop.setKeywords(keywords);
                                    results.add(shop);
                                }
                            }

                            if (results.size() > 10) // must be
                                results = results.subList(0, 10);

                            for (Shop dummyShop : results) {
                                mNaverPrImg = dummyShop.getThumbBmp();
                                Mat userSelImgTarget = new Mat(userSelImg.width(), userSelImg.height(),
                                        CvType.CV_8UC4);
                                Mat naverPrImgTarget = new Mat(mNaverPrImg.getWidth(), mNaverPrImg.getHeight(),
                                        CvType.CV_8UC4);

                                Utils.bitmapToMat(mNaverPrImg, naverPrImgTarget);
                                Imgproc.cvtColor(userSelImg, userSelImgTarget, Imgproc.COLOR_BGR2RGB);
                                Imgproc.cvtColor(naverPrImgTarget, naverPrImgTarget, Imgproc.COLOR_RGBA2RGB);

                                int ret = AkazeFeatureMatching(userSelImg.getNativeObjAddr(),
                                        naverPrImgTarget.getNativeObjAddr());

                                if (ret == 1) { // find one!
                                    DecimalFormat df = new DecimalFormat("#,###");
                                    String num = df.format(dummyShop.getLprice());
                                    int exist_flag = 0;
                                    for (int ii = 0; ii < findingItems.size(); ii++) {
                                        if (findingItems.get(ii).getProductName()
                                                .equals(dummyShop.getTitle())) {
                                            exist_flag = 1;
                                            break;
                                        }
                                    }
                                    if (exist_flag == 0) {
                                        findingItems.add(new Results_GridItem(dummyShop.getTitle(), mNaverPrImg,
                                                " " + num + "?", dummyShop.getLprice(),
                                                dummyShop.getLink(), dummyShop.getKeywords(),
                                                dummyShop.getCombinationKeyword(), dummyShop.getImage()));
                                    }
                                }
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } // end of for

                    return results;
                } // end of doinbackground

                @Override
                protected void onPostExecute(List<Shop> shops) {
                    super.onPostExecute(shops);

                    TextView t = (TextView) findViewById(R.id.loadingText);
                    t.setVisibility(View.GONE);
                    GridView g = (GridView) findViewById(R.id.list_view);
                    g.setVisibility(View.VISIBLE);

                    if (findingItems.size() == 0) {
                        TextView tLoad = (TextView) findViewById(R.id.loadingText);
                        tLoad.setText("No results found.");
                        tLoad.setVisibility(View.VISIBLE);
                        gridView.setVisibility(View.GONE);
                    } else {
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        Collections.sort(findingItems, new Comparator<Results_GridItem>() {
                            @Override
                            public int compare(Results_GridItem o1, Results_GridItem o2) {
                                if (o1.getPrice() > o2.getPrice()) {
                                    return 1;
                                } else if (o1.getPrice() < o2.getPrice()) {
                                    return -1;
                                } else {
                                    return 0;
                                }
                            }
                        });
                        for (int i = 0; i < findingItems.size(); i++) {
                            Log.d(TAG, "List !! " + Integer.toString(findingItems.get(i).getPrice()));
                        }
                        Log.d(TAG, "finding Size!!!!" + Integer.toString(findingItems.size()));
                        gridViewAdapter = new GridViewAdapter(getApplicationContext(), findingItems);
                        gridView.setAdapter(gridViewAdapter);
                    }
                    asyncDialog.dismiss();
                }
            }.execute();

        } // end of PostExcute
    }.execute();
}
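
The AkazeFeatureMatching call inside the inner task above passes two Mat addresses to native feature-matching code. Its declaration is not shown on this page; a plausible sketch, inferred from the call site (the signature is an assumption):

// Presumed declaration: returns 1 when the two images are considered a match.
private native int AkazeFeatureMatching(long queryImgAddr, long trainImgAddr);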

From source file:edu.ceta.vision.android.topcode.ScannerAndroidNative.java

License:Open Source License

public List<TopCode> scan(Mat img, boolean isColorImage) {
    TopCode[] array = scanNativeMat(img.getNativeObjAddr(), isColorImage);
    return new ArrayList<TopCode>(Arrays.asList(array));
}

From source file:edu.sfsu.cs.orange.ocr.OcrRecognizeAsyncTask.java

License:Apache License

@Override
protected Boolean doInBackground(Void... arg0) {
    long start = System.currentTimeMillis();
    Bitmap bitmap = activity.getCameraManager().buildLuminanceSource(data, width, height)
            .renderCroppedGreyscaleBitmap();

    String textResult;
    Mat image = new Mat();
    Utils.bitmapToMat(bitmap, image);
    Mat gray = new Mat();
    Utils.bitmapToMat(bitmap, gray);

    Mat background = new Mat();
    Utils.bitmapToMat(bitmap, background); //to test with BinarizeBG
    Mat finalimage = new Mat();
    Utils.bitmapToMat(bitmap, finalimage);

    //image.convertTo( gray,CvType.CV_8UC1);
    //image.convertTo(image,CvType.CV_64F);
    try {
        Imgcodecs.imwrite("/storage/emulated/0/DCIM/orig.jpg", image);
        OpencvNativeClass.BinarizeShafait(gray.getNativeObjAddr(), image.getNativeObjAddr());

        Imgcodecs.imwrite("/storage/emulated/0/DCIM/binarized.jpg", image);
        Utils.matToBitmap(image, bitmap);

        //Pix fimage = ReadFile.readBitmap(bitmap);
        //fimage = Binarize.otsuAdaptiveThreshold(fimage);

        //float angle = Skew.findSkew(fimage);
        //Log.i("Skew: ", Float.toString(angle));
        //double deg2rad = 3.14159265 / 180.;

        //fimage = Rotate.rotate(fimage, angle);

        //bitmap = WriteFile.writeBitmap(fimage);

        Mat skewed = new Mat();

        //Utils.bitmapToMat(bitmap,skewed);
        //Imgcodecs.imwrite("/storage/emulated/0/DCIM/deskewed.jpg", skewed);

        baseApi.setImage(ReadFile.readBitmap(bitmap));

        textResult = baseApi.getUTF8Text();
        timeRequired = System.currentTimeMillis() - start;

        // Check for failure to recognize text
        if (textResult == null || textResult.equals("")) {
            return false;
        }

        ocrResult = new OcrResult();
        ocrResult.setWordConfidences(baseApi.wordConfidences());
        ocrResult.setMeanConfidence(baseApi.meanConfidence());
        ocrResult.setRegionBoundingBoxes(baseApi.getRegions().getBoxRects());
        ocrResult.setTextlineBoundingBoxes(baseApi.getTextlines().getBoxRects());
        ocrResult.setWordBoundingBoxes(baseApi.getWords().getBoxRects());
        ocrResult.setStripBoundingBoxes(baseApi.getStrips().getBoxRects());

        // Iterate through the results.
        final ResultIterator iterator = baseApi.getResultIterator();
        int[] lastBoundingBox;
        ArrayList<Rect> charBoxes = new ArrayList<Rect>();
        iterator.begin();
        do {
            lastBoundingBox = iterator.getBoundingBox(PageIteratorLevel.RIL_SYMBOL);
            Rect lastRectBox = new Rect(lastBoundingBox[0], lastBoundingBox[1], lastBoundingBox[2],
                    lastBoundingBox[3]);
            charBoxes.add(lastRectBox);
        } while (iterator.next(PageIteratorLevel.RIL_SYMBOL));
        iterator.delete();
        ocrResult.setCharacterBoundingBoxes(charBoxes);

    } catch (RuntimeException e) {
        Log.e("OcrRecognizeAsyncTask",
                "Caught RuntimeException in request to Tesseract. Setting state to CONTINUOUS_STOPPED.");
        e.printStackTrace();
        try {
            baseApi.clear();
            activity.stopHandler();
        } catch (NullPointerException e1) {
            // Continue
        }
        return false;
    }
    timeRequired = System.currentTimeMillis() - start;
    ocrResult.setBitmap(bitmap);
    String[] temp = textResult.split("\n");
    if (temp.length != 0)
        textResult = "";
    for (int i = 0; i < temp.length; i++) {
        if (temp[i].length() != 0) {
            if (i < temp.length - 1) {
                textResult = textResult + temp[i] + "\n";
            } else
                textResult = textResult + temp[i];
        }
    }
    String textResult2 = ParsingNativeClass.ParseAddress(textResult);
    Log.d("Return parsing", textResult2);
    ocrResult.setViewtext(textResult);
    ocrResult.setText(textResult2);
    ocrResult.setRecognitionTimeRequired(timeRequired);
    return true;
}